
Commit ed3c0ef

Author: Mofan Zhou
Commit message: update theano TUT
1 parent d19ab50 commit ed3c0ef

File tree

17 files changed: +778 -0 lines changed
Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 10 - visualize result
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import theano
import theano.tensor as T
import numpy as np
import matplotlib.pyplot as plt


class Layer(object):
    def __init__(self, inputs, in_size, out_size, activation_function=None):
        self.W = theano.shared(np.random.normal(0, 1, (in_size, out_size)))
        self.b = theano.shared(np.zeros((out_size, )) + 0.1)
        self.Wx_plus_b = T.dot(inputs, self.W) + self.b
        self.activation_function = activation_function
        if activation_function is None:
            self.outputs = self.Wx_plus_b
        else:
            self.outputs = self.activation_function(self.Wx_plus_b)


# Make up some fake data
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise        # y = x^2 - 0.5

# show the fake data
plt.scatter(x_data, y_data)
plt.show()

# determine the inputs dtype
x = T.dmatrix("x")
y = T.dmatrix("y")

# add layers
l1 = Layer(x, 1, 10, T.nnet.relu)
l2 = Layer(l1.outputs, 10, 1, None)

# compute the cost
cost = T.mean(T.square(l2.outputs - y))

# compute the gradients
gW1, gb1, gW2, gb2 = T.grad(cost, [l1.W, l1.b, l2.W, l2.b])

# apply gradient descent
learning_rate = 0.05
train = theano.function(
    inputs=[x, y],
    outputs=[cost],
    updates=[(l1.W, l1.W - learning_rate * gW1),
             (l1.b, l1.b - learning_rate * gb1),
             (l2.W, l2.W - learning_rate * gW2),
             (l2.b, l2.b - learning_rate * gb2)])

# prediction
predict = theano.function(inputs=[x], outputs=l2.outputs)

# plot the real data


for i in range(1000):
    # training
    err = train(x_data, y_data)
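
A quick aside on T.grad, used in the script above: given a scalar expression, it returns the symbolic derivative with respect to each listed variable, which the update rules then consume. A minimal standalone sketch:

import theano
import theano.tensor as T

a = T.dscalar("a")
expr = a ** 2 + 3 * a            # a scalar expression of a
g = T.grad(expr, a)              # symbolic derivative: 2*a + 3
f = theano.function([a], g)
print(f(2.0))                    # 7.0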
Lines changed: 83 additions & 0 deletions
@@ -0,0 +1,83 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 10 - visualize result
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import theano
import theano.tensor as T
import numpy as np
import matplotlib.pyplot as plt


class Layer(object):
    def __init__(self, inputs, in_size, out_size, activation_function=None):
        self.W = theano.shared(np.random.normal(0, 1, (in_size, out_size)))
        self.b = theano.shared(np.zeros((out_size, )) + 0.1)
        self.Wx_plus_b = T.dot(inputs, self.W) + self.b
        self.activation_function = activation_function
        if activation_function is None:
            self.outputs = self.Wx_plus_b
        else:
            self.outputs = self.activation_function(self.Wx_plus_b)


# Make up some fake data
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise        # y = x^2 - 0.5

# show the fake data
plt.scatter(x_data, y_data)
plt.show()

# determine the inputs dtype
x = T.dmatrix("x")
y = T.dmatrix("y")

# add layers
l1 = Layer(x, 1, 10, T.nnet.relu)
l2 = Layer(l1.outputs, 10, 1, None)

# compute the cost
cost = T.mean(T.square(l2.outputs - y))

# compute the gradients
gW1, gb1, gW2, gb2 = T.grad(cost, [l1.W, l1.b, l2.W, l2.b])

# apply gradient descent
learning_rate = 0.05
train = theano.function(
    inputs=[x, y],
    outputs=[cost],
    updates=[(l1.W, l1.W - learning_rate * gW1),
             (l1.b, l1.b - learning_rate * gb1),
             (l2.W, l2.W - learning_rate * gW2),
             (l2.b, l2.b - learning_rate * gb2)])

# prediction
predict = theano.function(inputs=[x], outputs=l2.outputs)

# plot the real data
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
plt.ion()
plt.show()

for i in range(1000):
    # training
    err = train(x_data, y_data)
    if i % 50 == 0:
        # to visualize the result and improvement
        try:
            # remove the previously drawn fit; on the first pass `lines`
            # is still undefined, which the except clause swallows
            ax.lines.remove(lines[0])
        except Exception:
            pass
        prediction_value = predict(x_data)
        # plot the prediction
        lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
        plt.pause(.5)
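
A note on the updates= argument used in the training function above: each (shared_variable, new_expression) pair is applied after every call to the compiled function, which is what makes the (W, W - learning_rate * gW) pairs perform gradient descent. A minimal sketch of the same mechanism:

import numpy as np
import theano
import theano.tensor as T

state = theano.shared(np.float64(0.0), name="state")   # a shared variable keeps state across calls
inc = T.dscalar("inc")

# after each call, `state` is replaced by `state + inc`
accumulate = theano.function(inputs=[inc], outputs=state, updates=[(state, state + inc)])

print(accumulate(1.0))   # 0.0 -- the output is computed before the update
print(accumulate(2.0))   # 1.0 -- state is now 3.0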
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 11 - classification example
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import numpy as np
import theano
import theano.tensor as T


def compute_accuracy(y_target, y_predict):
    correct_prediction = np.equal(y_predict, y_target)
    accuracy = np.sum(correct_prediction) / len(correct_prediction)
    return accuracy

rng = np.random

N = 400                                   # training sample size
feats = 784                               # number of input variables

# generate a dataset: D = (input_values, target_class)
D = (rng.randn(N, feats), rng.randint(size=N, low=0, high=2))

# Declare Theano symbolic variables


# initialize the weights and biases


# Construct Theano expression graph


# Compile


# Training
for i in range(500):
    pass
    if i % 50 == 0:
        pass

print("target values for D:")
print('')
print("prediction on D:")
print('')
Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 11 - classification example
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import numpy as np
import theano
import theano.tensor as T


def compute_accuracy(y_target, y_predict):
    correct_prediction = np.equal(y_predict, y_target)
    accuracy = np.sum(correct_prediction) / len(correct_prediction)
    return accuracy

rng = np.random

N = 400                                   # training sample size
feats = 784                               # number of input variables

# generate a dataset: D = (input_values, target_class)
D = (rng.randn(N, feats), rng.randint(size=N, low=0, high=2))

# Declare Theano symbolic variables
x = T.dmatrix("x")
y = T.dvector("y")

# initialize the weights and biases
w = theano.shared(rng.randn(feats), name="w")
b = theano.shared(0., name="b")


# Construct Theano expression graph
p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b))             # logistic probability that target = 1 (activation function)
prediction = p_1 > 0.5                              # the prediction thresholded
xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)   # cross-entropy loss function
cost = xent.mean() + 0.01 * (w ** 2).sum()          # the cost to minimize (with L2 regularization)
gw, gb = T.grad(cost, [w, b])                       # compute the gradient of the cost


# Compile
learning_rate = 0.1
train = theano.function(
    inputs=[x, y],
    outputs=[prediction, xent.mean()],
    updates=((w, w - learning_rate * gw), (b, b - learning_rate * gb)))
predict = theano.function(inputs=[x], outputs=prediction)

# Training
for i in range(500):
    pred, err = train(D[0], D[1])
    if i % 50 == 0:
        print('cost:', err)
        print("accuracy:", compute_accuracy(D[1], predict(D[0])))

print("target values for D:")
print(D[1])
print("prediction on D:")
print(predict(D[0]))
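
For intuition about compute_accuracy and the thresholded prediction above, a tiny numpy-only check (the probabilities here are made up):

import numpy as np

y_target = np.array([0, 1, 1, 0, 1])
p_1 = np.array([0.2, 0.9, 0.4, 0.1, 0.7])    # made-up logistic probabilities
y_predict = (p_1 > 0.5).astype(int)          # same thresholding as `prediction` above
correct = np.equal(y_predict, y_target)
print(np.sum(correct) / len(correct))        # 0.8 -- four of five match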
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 12 - cross validation
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import theano
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 12 - cross validation
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import theano

theanoTUT/theano3_what_ML_does.py

Lines changed: 83 additions & 0 deletions
@@ -0,0 +1,83 @@
# View more python tutorials on my Youtube and Youku channel!!!

# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

# 3 - What does machine learning do?
"""
Please note, this code is written for Python 3. If you are using Python 2, please modify it accordingly.
"""
import theano
import theano.tensor as T
import numpy as np
import matplotlib.pyplot as plt


class Layer(object):
    def __init__(self, inputs, in_size, out_size, activation_function=None):
        self.W = theano.shared(np.random.normal(0, 1, (in_size, out_size)))
        self.b = theano.shared(np.zeros((out_size, )) + 0.1)
        self.Wx_plus_b = T.dot(inputs, self.W) + self.b
        self.activation_function = activation_function
        if activation_function is None:
            self.outputs = self.Wx_plus_b
        else:
            self.outputs = self.activation_function(self.Wx_plus_b)


# Make up some fake data
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise        # y = x^2 - 0.5

# show the fake data
plt.scatter(x_data, y_data)
plt.show()

# determine the inputs dtype
x = T.dmatrix("x")
y = T.dmatrix("y")

# add layers
l1 = Layer(x, 1, 10, T.nnet.relu)
l2 = Layer(l1.outputs, 10, 1, None)

# compute the cost
cost = T.mean(T.square(l2.outputs - y))

# compute the gradients
gW1, gb1, gW2, gb2 = T.grad(cost, [l1.W, l1.b, l2.W, l2.b])

# apply gradient descent
learning_rate = 0.1
train = theano.function(
    inputs=[x, y],
    outputs=[cost],
    updates=[(l1.W, l1.W - learning_rate * gW1),
             (l1.b, l1.b - learning_rate * gb1),
             (l2.W, l2.W - learning_rate * gW2),
             (l2.b, l2.b - learning_rate * gb2)])

# prediction
predict = theano.function(inputs=[x], outputs=l2.outputs)

# plot the real data
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
plt.ion()
plt.show()

for i in range(1000):
    # training
    err = train(x_data, y_data)
    if i % 50 == 0:
        # to visualize the result and improvement
        try:
            # remove the previously drawn fit; on the first pass `lines`
            # is still undefined, which the except clause swallows
            ax.lines.remove(lines[0])
        except Exception:
            pass
        prediction_value = predict(x_data)
        # plot the prediction
        lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
        plt.pause(.5)
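
One caveat on the redraw idiom in this loop: ax.lines.remove(lines[0]) worked on the matplotlib releases this tutorial targeted, but newer matplotlib makes Axes.lines read-only, and the portable spelling is lines[0].remove(). A minimal sketch of the same live-plot loop under that assumption:

import numpy as np
import matplotlib.pyplot as plt

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.ion()
plt.show()

x = np.linspace(-1, 1, 100)
lines = None
for step in range(1, 6):
    if lines is not None:
        lines[0].remove()                    # drop the previous curve before redrawing
    lines = ax.plot(x, x ** step, 'r-', lw=5)
    plt.pause(.5)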
