
Commit 94e1af8

SdA.h cpp

Author: Yusuke Sugomori
1 parent: b9ddf51

File tree: cpp/SdA.cpp, cpp/SdA.h

2 files changed, 18 insertions(+), 19 deletions(-)

cpp/SdA.cpp (+1, -19)

@@ -3,6 +3,7 @@
 #include "HiddenLayer.h"
 #include "dA.h"
 #include "LogisticRegression.h"
+#include "SdA.h"
 using namespace std;

 double uniform(double min, double max) {
@@ -28,25 +29,6 @@ double sigmoid(double x) {
 }


-
-class SdA {
-
-public:
-  int N;
-  int n_ins;
-  int *hidden_layer_sizes;
-  int n_outs;
-  int n_layers;
-  HiddenLayer **sigmoid_layers;
-  dA **dA_layers;
-  LogisticRegression *log_layer;
-  SdA(int, int, int*, int, int);
-  ~SdA();
-  void pretrain(int*, double, double, int);
-  void finetune(int*, int*, double, int);
-  void predict(int*, double*);
-};
-
 // SdA
 SdA::SdA(int size, int n_i, int *hls, int n_o, int n_l) {
   int input_size;
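Besides dropping the class body, the only change to SdA.cpp is the new #include "SdA.h", placed after the existing headers. Since the new cpp/SdA.h (shown below) names HiddenLayer, dA and LogisticRegression without declaring them, any file that uses the class seemingly has to repeat this include order. A minimal sketch of that order, not itself part of the commit:

// Sketch: include order a user of SdA must follow after this commit.
// SdA.h refers to HiddenLayer, dA and LogisticRegression by name but
// does not declare them, so the dependency headers have to come first.
#include "HiddenLayer.h"
#include "dA.h"
#include "LogisticRegression.h"
#include "SdA.h"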

cpp/SdA.h (+17, -0)

@@ -0,0 +1,17 @@
+class SdA {
+
+public:
+  int N;
+  int n_ins;
+  int *hidden_layer_sizes;
+  int n_outs;
+  int n_layers;
+  HiddenLayer **sigmoid_layers;
+  dA **dA_layers;
+  LogisticRegression *log_layer;
+  SdA(int, int, int*, int, int);
+  ~SdA();
+  void pretrain(int*, double, double, int);
+  void finetune(int*, int*, double, int);
+  void predict(int*, double*);
+};
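As committed, the header has no include guard and no declarations of the three class types it mentions. Because SdA only stores pointers to those types, forward declarations would be enough to make the header order-independent. A hypothetical, self-contained variant, offered only as a sketch and not part of this commit:

// Hypothetical self-contained SdA.h: an include guard plus forward
// declarations, so inclusion no longer depends on header order.
#ifndef SDA_H
#define SDA_H

class HiddenLayer;        // pointer members only, so forward
class dA;                 // declarations are sufficient here
class LogisticRegression;

class SdA {

public:
  int N;
  int n_ins;
  int *hidden_layer_sizes;
  int n_outs;
  int n_layers;
  HiddenLayer **sigmoid_layers;
  dA **dA_layers;
  LogisticRegression *log_layer;
  SdA(int, int, int*, int, int);
  ~SdA();
  void pretrain(int*, double, double, int);
  void finetune(int*, int*, double, int);
  void predict(int*, double*);
};

#endif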

0 commit comments