Commit edf9d0a

Author: Yusuke Sugomori
Commit message: bug fix
1 parent 4da866e commit edf9d0a

File tree: 3 files changed, +11 -11 lines


DeepBeliefNets.py

Lines changed: 2 additions & 2 deletions

@@ -134,7 +134,7 @@ def predict(self, x):
 
 
 def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
-             finetune_lr=0.1, finetune_epochs=1000):
+             finetune_lr=0.1, finetune_epochs=200):
 
     x = numpy.array([[1,1,1,0,0,0],
                      [1,0,1,0,0,0],
@@ -153,7 +153,7 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
     rng = numpy.random.RandomState(123)
 
     # construct DBN
-    dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[2], n_outs=2, numpy_rng=rng)
+    dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[10], n_outs=2, numpy_rng=rng)
 
     # pre-training (TrainUnsupervisedDBN)
     dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)
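
For context, a minimal sketch of how these updated defaults are exercised, assuming the DBN class is importable from DeepBeliefNets.py; the finetune call and its exact signature are inferred from test_dbn's parameters rather than shown in this hunk, so treat them as assumptions:

import numpy
from DeepBeliefNets import DBN  # assumed import path, matching the file name

# toy inputs shaped for n_ins=6 / n_outs=2 (illustrative data only)
x = numpy.array([[1, 1, 1, 0, 0, 0],
                 [1, 0, 1, 0, 0, 0],
                 [0, 0, 0, 1, 1, 0]])
y = numpy.array([[1, 0],
                 [1, 0],
                 [0, 1]])
rng = numpy.random.RandomState(123)

# wider hidden layer ([10] rather than [2]), as this commit sets it
dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[10], n_outs=2, numpy_rng=rng)
dbn.pretrain(lr=0.1, k=1, epochs=1000)  # layer-wise CD-k pre-training
dbn.finetune(lr=0.1, epochs=200)        # supervised fine-tuning (assumed signature)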

LogisticRegression.py

Lines changed: 7 additions & 7 deletions

@@ -25,21 +25,21 @@ def __init__(self, input, label, n_in, n_out):
 
         # self.params = [self.W, self.b]
 
-    def train(self, lr=0.1, input=None):
+    def train(self, lr=0.1, input=None, L2_reg=0.00):
         if input is not None:
             self.x = input
 
-        p_y_given_x = softmax(numpy.dot(self.x, self.W) + self.b)
+        p_y_given_x = sigmoid(numpy.dot(self.x, self.W) + self.b)
         d_y = self.y - p_y_given_x
 
-        self.W += lr * numpy.dot(self.x.T, d_y)
+        self.W += lr * numpy.dot(self.x.T, d_y) - lr * L2_reg * self.W
         self.b += lr * numpy.mean(d_y, axis=0)
 
         # cost = self.negative_log_likelihood()
         # return cost
 
     def negative_log_likelihood(self):
-        sigmoid_activation = softmax(numpy.dot(self.x, self.W) + self.b)
+        sigmoid_activation = sigmoid(numpy.dot(self.x, self.W) + self.b)
 
         cross_entropy = - numpy.mean(
             numpy.sum(self.y * numpy.log(sigmoid_activation) +
@@ -50,10 +50,10 @@ def negative_log_likelihood(self):
 
 
     def predict(self, x):
-        return softmax(numpy.dot(x, self.W) + self.b)
+        return sigmoid(numpy.dot(x, self.W) + self.b)
 
 
-def test_lr(learning_rate=0.01, n_epochs=1000):
+def test_lr(learning_rate=0.01, n_epochs=200):
     # training data
     x = numpy.array([[1,1,1,0,0,0],
                      [1,0,1,0,0,0],
@@ -75,7 +75,7 @@ def test_lr(learning_rate=0.01, n_epochs=1000):
     # train
     for epoch in xrange(n_epochs):
         classifier.train(lr=learning_rate)
-        # cost = classifier.negative_log_likelihood(y=y)
+        # cost = classifier.negative_log_likelihood()
         # print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
         learning_rate *= 0.95
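
The substantive fixes here are twofold: the activation is sigmoid rather than softmax, and train() gains an optional L2 penalty (weight decay) on W. A minimal self-contained sketch of the same update rule, using only numpy and illustrative names rather than the repo's class:

import numpy

def sigmoid(z):
    # element-wise logistic function: 1 / (1 + exp(-z))
    return 1.0 / (1.0 + numpy.exp(-z))

def train_step(x, y, W, b, lr=0.1, L2_reg=0.00):
    p_y_given_x = sigmoid(numpy.dot(x, W) + b)  # forward pass
    d_y = y - p_y_given_x                       # prediction error
    # gradient step; the L2 term shrinks W toward zero on every update
    W += lr * numpy.dot(x.T, d_y) - lr * L2_reg * W
    b += lr * numpy.mean(d_y, axis=0)
    return W, b

# toy usage
x = numpy.array([[1., 1., 0.], [0., 1., 1.]])
y = numpy.array([[1., 0.], [0., 1.]])
W = numpy.zeros((3, 2))
b = numpy.zeros(2)
for epoch in xrange(200):
    W, b = train_step(x, y, W, b, lr=0.01, L2_reg=0.01)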

RestrictedBoltzmannMachine.py

Lines changed: 2 additions & 2 deletions

@@ -154,8 +154,8 @@ def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):
 
     for epoch in xrange(training_epochs):
         rbm.contrastive_divergence(lr=learning_rate, k=k)
-        cost = rbm.get_reconstruction_cross_entropy()
-        print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
+        # cost = rbm.get_reconstruction_cross_entropy()
+        # print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
 
 
     # test
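
The commit silences the per-epoch reconstruction-cost print in test_rbm. If occasional monitoring is still wanted, one option is to log only every 100th epoch; a sketch reusing just the API already visible in this hunk:

import sys

for epoch in xrange(training_epochs):
    rbm.contrastive_divergence(lr=learning_rate, k=k)
    if epoch % 100 == 0:
        # same cost the commit commented out, sampled sparsely
        cost = rbm.get_reconstruction_cross_entropy()
        print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost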
