Skip to content

Commit 40af338

Browse files
author
Yusuke Sugomori
committed
multiclass Logistic Regression
1 parent 3563a33 commit 40af338

File tree

1 file changed

+6
-3
lines changed

1 file changed

+6
-3
lines changed

LogisticRegression.py

Lines changed: 6 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -29,7 +29,8 @@ def train(self, lr=0.1, input=None, L2_reg=0.00):
2929
if input is not None:
3030
self.x = input
3131

32-
p_y_given_x = sigmoid(numpy.dot(self.x, self.W) + self.b)
32+
# p_y_given_x = sigmoid(numpy.dot(self.x, self.W) + self.b)
33+
p_y_given_x = softmax(numpy.dot(self.x, self.W) + self.b)
3334
d_y = self.y - p_y_given_x
3435

3536
self.W += lr * numpy.dot(self.x.T, d_y) - lr * L2_reg * self.W
@@ -39,7 +40,8 @@ def train(self, lr=0.1, input=None, L2_reg=0.00):
3940
# return cost
4041

4142
def negative_log_likelihood(self):
42-
sigmoid_activation = sigmoid(numpy.dot(self.x, self.W) + self.b)
43+
# sigmoid_activation = sigmoid(numpy.dot(self.x, self.W) + self.b)
44+
sigmoid_activation = softmax(numpy.dot(self.x, self.W) + self.b)
4345

4446
cross_entropy = - numpy.mean(
4547
numpy.sum(self.y * numpy.log(sigmoid_activation) +
@@ -50,7 +52,8 @@ def negative_log_likelihood(self):
5052

5153

5254
def predict(self, x):
53-
return sigmoid(numpy.dot(x, self.W) + self.b)
55+
# return sigmoid(numpy.dot(x, self.W) + self.b)
56+
return softmax(numpy.dot(x, self.W) + self.b)
5457

5558

5659
def test_lr(learning_rate=0.01, n_epochs=200):

0 commit comments

Comments (0)