Skip to content

Commit c291e7e

Browse files
author
Yusuke Sugomori
committed
minor fix
1 parent 04fe276 commit c291e7e

File tree

5 files changed: +40 additions, −9 deletions

.gitignore

Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,35 @@
11
.DS_Store
22
*.pyc
33
*.out
4-
*.o
4+
*.o
5+
6+
#
7+
# from https://github.com/github/gitignore/blob/master/Global/Eclipse.gitignore
8+
#
9+
10+
*.pydevproject
11+
.project
12+
.metadata
13+
bin/**
14+
tmp/**
15+
tmp/**/*
16+
*.tmp
17+
*.bak
18+
*.swp
19+
*~.nib
20+
local.properties
21+
.classpath
22+
.settings/
23+
.loadpath
24+
25+
# External tool builders
26+
.externalToolBuilders/
27+
28+
# Locally stored "Eclipse launch configurations"
29+
*.launch
30+
31+
# CDT-specific
32+
.cproject
33+
34+
# PDT-specific
35+
.buildpath

c/DBN.c

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ void DBN__construct(DBN* this, int N, \
5454
if(i == 0) {
5555
input_size = n_ins;
5656
} else {
57-
input_size = hidden_layer_sizes[i - 1];
57+
input_size = hidden_layer_sizes[i-1];
5858
}
5959

6060
// construct sigmoid_layer
@@ -135,7 +135,7 @@ void DBN_finetune(DBN* this, int *input, int *label, double lr, int epochs) {
135135
int i, j, m, n, epoch;
136136

137137
int *layer_input;
138-
int prev_layer_input_size;
138+
// int prev_layer_input_size;
139139
int *prev_layer_input;
140140

141141
int *train_X = (int *)malloc(sizeof(int) * this->n_ins);
@@ -178,7 +178,7 @@ void DBN_finetune(DBN* this, int *input, int *label, double lr, int epochs) {
178178
void DBN_predict(DBN* this, int *x, double *y) {
179179
int i, j, k;
180180
double *layer_input;
181-
int prev_layer_input_size;
181+
// int prev_layer_input_size;
182182
double *prev_layer_input;
183183

184184
double linear_output;

c/SdA.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ void SdA__construct(SdA* this, int N, \
5353
if(i == 0) {
5454
input_size = n_ins;
5555
} else {
56-
input_size = hidden_layer_sizes[i - 1];
56+
input_size = hidden_layer_sizes[i-1];
5757
}
5858

5959
// construct sigmoid_layer

cpp/DBN.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ DBN::DBN(int size, int n_i, int *hls, int n_o, int n_l) {
4848
if(i == 0) {
4949
input_size = n_ins;
5050
} else {
51-
input_size = hidden_layer_sizes[i - 1];
51+
input_size = hidden_layer_sizes[i-1];
5252
}
5353

5454
// construct sigmoid_layer
@@ -123,7 +123,7 @@ void DBN::pretrain(int *input, double lr, int k, int epochs) {
123123

124124
void DBN::finetune(int *input, int *label, double lr, int epochs) {
125125
int *layer_input;
126-
int prev_layer_input_size;
126+
// int prev_layer_input_size;
127127
int *prev_layer_input;
128128

129129
int *train_X = new int[n_ins];
@@ -164,7 +164,7 @@ void DBN::finetune(int *input, int *label, double lr, int epochs) {
164164

165165
void DBN::predict(int *x, double *y) {
166166
double *layer_input;
167-
int prev_layer_input_size;
167+
// int prev_layer_input_size;
168168
double *prev_layer_input;
169169

170170
double linear_output;

cpp/SdA.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ SdA::SdA(int size, int n_i, int *hls, int n_o, int n_l) {
4747
if(i == 0) {
4848
input_size = n_ins;
4949
} else {
50-
input_size = hidden_layer_sizes[i - 1];
50+
input_size = hidden_layer_sizes[i-1];
5151
}
5252

5353
// construct sigmoid_layer

0 commit comments

Comments (0)