Commit 771c89a

Logistic Regression

Yusuke Sugomori committed · 1 parent 72da781 · commit 771c89a

4 files changed: +133 additions, −13 deletions


DeepBeliefNets.py

Lines changed: 30 additions & 4 deletions
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+'''
+Deep Belief Nets
+'''
+
 import numpy
 import LogisticRegression
 import RestrictedBoltzmannMachine
@@ -9,13 +13,35 @@
 
 
 
-class DeepBeliefNets(object):
-    def __init__(self):
-        print 'DeepBeliefNets'
+class DBN(object):
+    def __init__(self, n_ins=2, hidden_layer_sizes=[3, 3], n_outs=2, \
+                 numpy_rng=None):  # constructor does not contain input
+
+        self.sigmoid_layers = []
+        self.rbm_layers = []
+        self.n_layers = len(hidden_layer_sizes)
+
+
+        if numpy_rng is None:
+            numpy_rng = numpy.random.RandomState(1234)
+
+
+        assert self.n_layers > 0
+
+
+        # construct multi layers
+        for i in xrange(self.n_layers):
+            if i == 0:
+                input_size = n_ins
+            else:
+                input_size = hidden_layer_sizes[i - 1]
+
 
 
 
+def test_dbn():
+    dbn = DBN()
 
 
 if __name__ == "__main__":
-    print 'DeepBeliefNets'
+    test_dbn()
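
At this commit the constructor stops at deriving each layer's input width: layer i reads n_ins inputs if it is the first layer, otherwise the previous layer's hidden size; the sigmoid and RBM layers themselves are not built yet. A minimal Python 3 sketch of that wiring (class and attribute names here are illustrative, not this repo's API):

# Sketch of the layer-size wiring DBN.__init__ sets up; the layer
# classes are stubbed out because this commit has not added them.
import numpy

class DBNSketch:
    def __init__(self, n_ins=2, hidden_layer_sizes=(3, 3), n_outs=2,
                 numpy_rng=None):
        self.n_layers = len(hidden_layer_sizes)
        assert self.n_layers > 0
        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)

        # each layer's input width is the previous layer's output width
        self.layer_shapes = []
        for i in range(self.n_layers):
            input_size = n_ins if i == 0 else hidden_layer_sizes[i - 1]
            self.layer_shapes.append((input_size, hidden_layer_sizes[i]))

dbn = DBNSketch(n_ins=4, hidden_layer_sizes=(3, 3), n_outs=2)
print(dbn.layer_shapes)  # [(4, 3), (3, 3)]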

LogisticRegression.py

Lines changed: 81 additions & 3 deletions
@@ -1,13 +1,91 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+'''
+Logistic Regression
+'''
+
+import sys
 import numpy
+from utils import *
 
 
 class LogisticRegression(object):
-    def __init__(self):
-        print 'LogisticRegression'
+    def __init__(self, input=None, n_in=2, n_out=2):
+        self.input = input
+        self.W = numpy.zeros((n_in, n_out))  # initialize W 0
+        self.b = numpy.zeros(n_out)          # initialize bias 0
+
+    def train(self, y, lr=0.1):
+        y_pred = softmax(numpy.dot(self.input, self.W) + self.b)
+        d_y = y - y_pred
+
+        self.W += lr * numpy.dot(self.input.T, d_y)
+        self.b += lr * numpy.mean(d_y, axis=0)
+
+        # cost = self.negative_log_likelihood()
+        # return cost
+
+    def negative_log_likelihood(self):
+        sigmoid_activation = sigmoid(numpy.dot(self.input, self.W) + self.b)
+
+        # entropy = - numpy.mean(numpy.sum(self.input * numpy.log(sigmoid_activation), axis=1))
+        # return entropy
+
+        cross_entropy = - numpy.mean(
+            numpy.sum(self.input * numpy.log(sigmoid_activation) +
+                      (1 - self.input) * numpy.log(1 - sigmoid_activation),
+                      axis=1))
+
+        return cross_entropy
+
+
+    def predict(self, x):
+        return sigmoid(numpy.dot(x, self.W) + self.b)
+
+
+def test_lr(learning_rate=0.1, n_epochs=100):
+    n_epochs = 10
+
+
+    # training data
+    rng = numpy.random.RandomState(123)
+    n_each = 10
+    m1 = -5.
+    s1 = 1.0
+    m2 = 10.
+    s2 = 10.
+
+    x = []
+    y = []
+
+    for i in xrange(n_each):
+        x.append([rng.normal(m1, s1), rng.normal(m1, s1)])
+        y.append([0])
+
+    for i in xrange(n_each):
+        x.append([rng.normal(m2, s2), rng.normal(m2, s2)])
+        y.append([1])
+
+    x = numpy.array(x)
+    y = numpy.array(y)
+
+
+
+    # construct LogisticRegression
+    classifier = LogisticRegression(input=x, n_in=2, n_out=1)
+
+    # train
+    for epoch in xrange(n_epochs):
+        classifier.train(y=y, lr=learning_rate)
+        # cost = classifier.negative_log_likelihood()
+        # print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
+
+
+    # test
+    x = numpy.array([-5.0, 1])
+    print >> sys.stderr, classifier.predict(x)
 
 
 if __name__ == "__main__":
-    print 'LogisticRegression'
+    test_lr()
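
The new train method is one step of gradient ascent on the softmax log-likelihood: with d_y = y − y_pred, W moves by lr · inputᵀ d_y and b by the mean of d_y. Two quirks worth flagging at this commit: negative_log_likelihood measures cross-entropy against self.input rather than the labels, and the softmax in utils (below) reduces over the whole array rather than per row. A minimal Python 3 sketch of the same update with a row-wise softmax and a label cross-entropy (names are mine, not this repo's API):

import numpy as np

def softmax_rows(x):
    e = np.exp(x - x.max(axis=1, keepdims=True))  # row-wise, overflow-safe
    return e / e.sum(axis=1, keepdims=True)

def train_step(X, Y, W, b, lr=0.1):
    # the diff's update: W += lr * X^T (Y - Yhat), here averaged over the batch
    y_pred = softmax_rows(X @ W + b)
    d_y = Y - y_pred
    W += lr * X.T @ d_y / len(X)
    b += lr * d_y.mean(axis=0)
    return -np.mean(np.sum(Y * np.log(y_pred), axis=1))  # cross-entropy vs labels

# two Gaussian blobs, as in test_lr above, but with one-hot labels
rng = np.random.RandomState(123)
X = np.vstack([rng.normal(-5., 1., (10, 2)), rng.normal(10., 10., (10, 2))])
Y = np.vstack([np.tile([1., 0.], (10, 1)), np.tile([0., 1.], (10, 1))])
W, b = np.zeros((2, 2)), np.zeros(2)
for epoch in range(10):
    cost = train_step(X, Y, W, b)
print(softmax_rows(np.array([[-5., 1.]]) @ W + b))  # class probabilities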

RestrictedBoltzmannMachine.py

Lines changed: 7 additions & 6 deletions
@@ -20,7 +20,7 @@
 import numpy
 from utils import *
 
-class RestrictedBoltzmannMachine(object):
+class RBM(object):
     def __init__(self, input=None, n_visible=2, n_hidden=3, \
                  W=None, hbias=None, vbias=None, numpy_rng=None):
 
@@ -53,7 +53,7 @@ def __init__(self, input=None, n_visible=2, n_hidden=3, \
         self.hbias = hbias
         self.vbias = vbias
 
-        self.params = [self.W, self.hbias, self.vbias]
+        # self.params = [self.W, self.hbias, self.vbias]
 
 
     def contrastive_divergence(self, lr=0.1, k=1):
@@ -77,8 +77,8 @@ def contrastive_divergence(self, lr=0.1, k=1):
         self.vbias += lr * numpy.mean(self.input - nv_samples, axis=1)
 
 
-        cost = self.get_reconstruction_cross_entropy()
-        return cost
+        # cost = self.get_reconstruction_cross_entropy()
+        # return cost
 
 
     def sample_h_given_v(self, v0_sample):
@@ -154,10 +154,11 @@ def test_rbm(learning_rate=0.1, k=1, training_epochs=15):
     rng = numpy.random.RandomState(123)
 
     # construct RBM
-    rbm = RestrictedBoltzmannMachine(input=data, n_visible=6, n_hidden=2, numpy_rng=rng)
+    rbm = RBM(input=data, n_visible=6, n_hidden=2, numpy_rng=rng)
 
     for epoch in xrange(training_epochs):
-        cost = rbm.contrastive_divergence(lr=learning_rate, k=k)
+        rbm.contrastive_divergence(lr=learning_rate, k=k)
+        cost = rbm.get_reconstruction_cross_entropy()
         print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
 
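
The last hunk changes the training-loop contract: contrastive_divergence now only updates the parameters, and the caller computes the reconstruction cross-entropy itself. Since the body of contrastive_divergence lies outside these hunks, here is a self-contained sketch of one CD-1 step in plain numpy (my formulation of the standard update, not the repo's exact code):

import numpy as np

def sigmoid(x):
    return 1. / (1. + np.exp(-x))

def cd1_step(v0, W, hbias, vbias, rng, lr=0.1):
    # positive phase: hidden activations driven by the data
    ph = sigmoid(v0 @ W + hbias)
    h0 = rng.binomial(1, ph)
    # negative phase: reconstruct visibles, then hidden probabilities again
    pv = sigmoid(h0 @ W.T + vbias)
    ph1 = sigmoid(pv @ W + hbias)
    # CD-1 update: data statistics minus reconstruction statistics
    W += lr * (v0.T @ ph - pv.T @ ph1) / len(v0)
    hbias += lr * (ph - ph1).mean(axis=0)
    vbias += lr * (v0 - pv).mean(axis=0)
    # the cost the new loop reports separately
    return -np.mean(np.sum(v0 * np.log(pv) + (1 - v0) * np.log(1 - pv), axis=1))

rng = np.random.RandomState(123)
v0 = np.array([[1., 1., 1., 0., 0., 0.],
               [0., 0., 1., 1., 1., 0.]])
W, hb, vb = 0.1 * rng.randn(6, 2), np.zeros(2), np.zeros(6)
for epoch in range(15):
    cost = cd1_step(v0, W, hb, vb, rng)  # typically decreases over epochs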

utils.py

Lines changed: 15 additions & 0 deletions
@@ -1,5 +1,20 @@
 ''' '''
 import numpy
 
+numpy.seterr(all='ignore')
+
+
 def sigmoid(x):
+    # numpy.seterrcall(sigmoid_err_handler)  # overflow handling
+    # numpy.seterr(all='call')
     return 1. / (1 + numpy.exp(-x))
+
+
+def sigmoid_err_handler(type, flg):
+    # Log
+    return
+
+
+def softmax(x):
+    e = numpy.exp(x - numpy.max(x, axis=0))  # prevent overflow
+    return e / numpy.sum(e)
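
The max-subtraction is safe because softmax is shift-invariant: softmax(x) = softmax(x − c) for any constant c, and choosing c = max(x) keeps every exponent at or below zero, so exp cannot overflow. As written, the max and sum reduce over the whole array, which suits a single 1-D vector; a batched caller would need per-row reductions. A quick check of the invariance (my sketch, not part of the commit):

import numpy

def softmax(x):
    e = numpy.exp(x - numpy.max(x, axis=0))  # shift by the max: a mathematical no-op
    return e / numpy.sum(e)

x = numpy.array([1000., 1001., 1002.])  # naive exp(x) would overflow to inf
print(softmax(x))            # [0.09003057 0.24472847 0.66524096]
print(softmax(x - 1000.))    # identical, by shift invariance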
