
Commit 0d32eaa

Author: Yusuke Sugomori (committed)
Commit message: c
1 parent: 5b73c27

File tree

4 files changed (+226 -41 lines changed)


c/LogisticRegression.c

Lines changed: 160 additions & 0 deletions
@@ -0,0 +1,160 @@
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "LogisticRegression.h"

void test_lr(void);


void LogisticRegression__construct(LogisticRegression *this, int N, int n_in, int n_out) {
  int i;
  this->N = N;
  this->n_in = n_in;
  this->n_out = n_out;

  // W is an n_out x n_in matrix stored as one contiguous block, plus row pointers
  this->W = (double **)malloc(sizeof(double*) * n_out);
  this->W[0] = (double *)malloc(sizeof(double) * n_in * n_out);
  for(i=0; i<n_out; i++) this->W[i] = this->W[0] + i * n_in;
  this->b = (double *)malloc(sizeof(double) * n_out);
}

void LogisticRegression__destruct(LogisticRegression *this) {
  free(this->W[0]);  // fix: release the weight block before the row-pointer array
  free(this->W);
  free(this->b);
}

void LogisticRegression_train(LogisticRegression *this, int *x, int *y, double lr) {
  int i,j;
  double *p_y_given_x = (double *)malloc(sizeof(double) * this->n_out);
  double *dy = (double *)malloc(sizeof(double) * this->n_out);

  for(i=0; i<this->n_out; i++) {
    p_y_given_x[i] = 0.0;  // fix: malloc does not zero-initialize
    for(j=0; j<this->n_in; j++) {
      p_y_given_x[i] += this->W[i][j] * x[j];
    }
    p_y_given_x[i] += this->b[i];
  }
  LogisticRegression_softmax(this, p_y_given_x);

  for(i=0; i<this->n_out; i++) {
    dy[i] = y[i] - p_y_given_x[i];

    for(j=0; j<this->n_in; j++) {
      this->W[i][j] += lr * dy[i] * x[j] / this->N;
    }

    this->b[i] += lr * dy[i] / this->N;
  }

  free(p_y_given_x);
  free(dy);
}

void LogisticRegression_softmax(LogisticRegression *this, double *x) {
  int i;
  double max = 0.0;
  double sum = 0.0;

  for(i=0; i<this->n_out; i++) if(max < x[i]) max = x[i];
  for(i=0; i<this->n_out; i++) {
    x[i] = exp(x[i] - max);  // subtract the max so exp cannot overflow
    sum += x[i];
  }

  for(i=0; i<this->n_out; i++) x[i] /= sum;
}

void LogisticRegression_predict(LogisticRegression *this, int *x, double *y) {
  int i,j;

  for(i=0; i<this->n_out; i++) {
    y[i] = 0.0;  // fix: the caller's output buffer may be uninitialized
    for(j=0; j<this->n_in; j++) {
      y[i] += this->W[i][j] * x[j];
    }
    y[i] += this->b[i];
  }

  LogisticRegression_softmax(this, y);
}



void test_lr(void) {
  int i, j, epoch;

  double learning_rate = 0.1;
  double n_epochs = 500;

  int train_N = 6;
  int test_N = 2;
  int n_in = 6;
  int n_out = 2;


  // training data
  int train_X[6][6] = {
    {1, 1, 1, 0, 0, 0},
    {1, 0, 1, 0, 0, 0},
    {1, 1, 1, 0, 0, 0},
    {0, 0, 1, 1, 1, 0},
    {0, 0, 1, 1, 0, 0},
    {0, 0, 1, 1, 1, 0}
  };

  int train_Y[6][2] = {
    {1, 0},
    {1, 0},
    {1, 0},
    {0, 1},
    {0, 1},
    {0, 1}
  };


  // construct LogisticRegression
  LogisticRegression classifier;
  LogisticRegression__construct(&classifier, train_N, n_in, n_out);


  // train
  for(epoch=0; epoch<n_epochs; epoch++) {
    for(i=0; i<train_N; i++) {
      LogisticRegression_train(&classifier, train_X[i], train_Y[i], learning_rate);
    }
    // learning_rate *= 0.95;
  }


  // test data
  int test_X[2][6] = {
    {1, 0, 1, 0, 0, 0},
    {0, 0, 1, 1, 1, 0}
  };

  double test_Y[2][2];


  // test
  for(i=0; i<test_N; i++) {
    LogisticRegression_predict(&classifier, test_X[i], test_Y[i]);
    for(j=0; j<n_out; j++) {
      printf("%f ", test_Y[i][j]);
    }
    printf("\n");
  }


  // destruct LogisticRegression
  LogisticRegression__destruct(&classifier);
}


int main(void) {
  test_lr();

  return 0;
}
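
For reference, and not part of the commit itself: LogisticRegression_train above is the standard stochastic-gradient step for softmax (multinomial logistic) regression with cross-entropy loss. In the notation of the code, with learning rate lr = $\eta$ and dataset size $N$,

$$p(y = i \mid x) = \mathrm{softmax}(Wx + b)_i = \frac{\exp\left(\sum_j W_{ij} x_j + b_i\right)}{\sum_k \exp\left(\sum_j W_{kj} x_j + b_k\right)},$$

$$W_{ij} \leftarrow W_{ij} + \frac{\eta}{N}\bigl(y_i - p(y = i \mid x)\bigr)\,x_j, \qquad b_i \leftarrow b_i + \frac{\eta}{N}\bigl(y_i - p(y = i \mid x)\bigr).$$

Subtracting the maximum inside LogisticRegression_softmax does not change the result, because the common factor $\exp(-\max_k x_k)$ cancels between numerator and denominator; it only guards against overflow in exp.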

c/LogisticRegression.h

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
#ifndef LOGISTICREGRESSION_H
#define LOGISTICREGRESSION_H


typedef struct {
  int N;
  int n_in;
  int n_out;
  double **W;
  double *b;
} LogisticRegression;

void LogisticRegression__construct(LogisticRegression*, int, int, int);
void LogisticRegression__destruct(LogisticRegression*);
void LogisticRegression_train(LogisticRegression*, int*, int*, double);
void LogisticRegression_softmax(LogisticRegression*, double*);
void LogisticRegression_predict(LogisticRegression*, int*, double*);


#endif
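
A quick note, not part of the commit: the new C port is self-contained (LogisticRegression.c defines main() and includes the header above), so, assuming a standard toolchain with the math library available, it should build with something like `gcc c/LogisticRegression.c -lm -o logisticregression` and, when run, print one row of predicted class probabilities for each of the two test vectors.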

cpp/LogisticRegression.cpp

Lines changed: 28 additions & 24 deletions
@@ -12,46 +12,48 @@ LogisticRegression::LogisticRegression(int size, int in, int out) {
 
   // initialize W, b
   W = new double*[n_out];
-  for (int i=0; i<n_out; i++) W[i] = new double[n_in];
+  for(int i=0; i<n_out; i++) W[i] = new double[n_in];
   b = new double[n_out];
 }
 
 LogisticRegression::~LogisticRegression() {
-  for (int i=0; i<n_out; i++) delete[] W[i];
+  for(int i=0; i<n_out; i++) delete[] W[i];
   delete[] W;
   delete[] b;
 }
 
 
 void LogisticRegression::train(int *x, int *y, double lr) {
-  double p_y_given_x[n_out];
-  double dy[n_out];
+  double *p_y_given_x = new double[n_out];
+  double *dy = new double[n_out];
 
-  for (int i=0; i<n_out; i++) {
-    for (int j=0; j<n_in; j++) {
+  for(int i=0; i<n_out; i++) {
+    for(int j=0; j<n_in; j++) {
       p_y_given_x[i] += W[i][j] * x[j];
     }
     p_y_given_x[i] += b[i];
   }
   softmax(p_y_given_x);
 
-  for (int i=0; i<n_out; i++) {
+  for(int i=0; i<n_out; i++) {
     dy[i] = y[i] - p_y_given_x[i];
 
-    for (int j=0; j<n_in; j++) {
+    for(int j=0; j<n_in; j++) {
       W[i][j] += lr * dy[i] * x[j] / N;
     }
 
     b[i] += lr * dy[i] / N;
   }
+  delete[] p_y_given_x;
+  delete[] dy;
 }
 
 void LogisticRegression::softmax(double *x) {
   double max = 0.0;
   double sum = 0.0;
 
-  for (int i=0; i<n_out; i++) if(max < x[i]) max = x[i];
-  for (int i=0; i<n_out; i++) {
+  for(int i=0; i<n_out; i++) if(max < x[i]) max = x[i];
+  for(int i=0; i<n_out; i++) {
     x[i] = exp(x[i] - max);
     sum += x[i];
   }
@@ -60,8 +62,8 @@ void LogisticRegression::softmax(double *x) {
 }
 
 void LogisticRegression::predict(int *x, double *y) {
-  for (int i=0; i<n_out; i++) {
-    for (int j=0; j<n_in; j++) {
+  for(int i=0; i<n_out; i++) {
+    for(int j=0; j<n_in; j++) {
       y[i] += W[i][j] * x[j];
     }
     y[i] += b[i];
@@ -76,7 +78,7 @@ void test_lr() {
   double n_epochs = 500;
 
   int train_N = 6;
-  int test_N = 1;
+  int test_N = 2;
   int n_in = 6;
   int n_out = 2;
   // int **train_X;
@@ -86,14 +88,14 @@ void test_lr() {
 
   // train_X = new int*[train_N];
   // train_Y = new int*[train_N];
-  // for (i=0; i<train_N; i++){
+  // for(i=0; i<train_N; i++){
   // train_X[i] = new int[n_in];
   // train_Y[i] = new int[n_out];
   // };
 
   // test_X = new int*[test_N];
   // test_Y = new double*[test_N];
-  // for (i=0; i<test_N; i++){
+  // for(i=0; i<test_N; i++){
   // test_X[i] = new int[n_in];
   // test_Y[i] = new double[n_out];
   // }
@@ -124,28 +126,30 @@ void test_lr() {
 
 
   // train online
-  for (int epoch=0; epoch<n_epochs; epoch++) {
-    for (int i=0; i<train_N; i++) {
+  for(int epoch=0; epoch<n_epochs; epoch++) {
+    for(int i=0; i<train_N; i++) {
       classifier.train(train_X[i], train_Y[i], learning_rate);
     }
-    learning_rate *= 0.95;
+    // learning_rate *= 0.95;
   }
 
 
   // test data
-  int test_X[1][6] = {
-    {1, 1, 1, 0, 0, 0}
+  int test_X[2][6] = {
+    {1, 0, 1, 0, 0, 0},
+    {0, 0, 1, 1, 1, 0}
   };
 
-  double test_Y[1][2];
+  double test_Y[2][2];
 
 
   // test
-  for (int i=0; i<test_N; i++) {
+  for(int i=0; i<test_N; i++) {
     classifier.predict(test_X[i], test_Y[i]);
-    for (int j=0; j<n_out; j++) {
-      cout << test_Y[i][j] << endl;
+    for(int j=0; j<n_out; j++) {
+      cout << test_Y[i][j] << " ";
     }
+    cout << endl;
   }
 
 }
