
Commit 0205ddb

Author: Yusuke Sugomori
Commit message: git mv
1 parent: bd22afe

File tree: 7 files changed (+220 -195 lines)

CDBN.py

Lines changed: 137 additions & 0 deletions
@@ -0,0 +1,137 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
DBN w/ continuous-valued inputs (Linear Energy)

References :
  - Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy Layer-Wise
    Training of Deep Networks, Advances in Neural Information Processing
    Systems 19, 2007

'''

import sys
import numpy
from HiddenLayer import HiddenLayer
from LogisticRegression import LogisticRegression
from RBM import RBM
from CRBM import CRBM
from DBN import DBN
from utils import *


class CDBN(DBN):
    def __init__(self, input=None, label=None,\
                 n_ins=2, hidden_layer_sizes=[3, 3], n_outs=2,\
                 numpy_rng=None):

        self.x = input
        self.y = label

        self.sigmoid_layers = []
        self.rbm_layers = []
        self.n_layers = len(hidden_layer_sizes)  # = len(self.rbm_layers)

        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)

        assert self.n_layers > 0

        # construct multi-layer
        for i in xrange(self.n_layers):
            # layer_size
            if i == 0:
                input_size = n_ins
            else:
                input_size = hidden_layer_sizes[i - 1]

            # layer_input
            if i == 0:
                layer_input = self.x
            else:
                layer_input = self.sigmoid_layers[-1].sample_h_given_v()

            # construct sigmoid_layer
            sigmoid_layer = HiddenLayer(input=layer_input,
                                        n_in=input_size,
                                        n_out=hidden_layer_sizes[i],
                                        numpy_rng=numpy_rng,
                                        activation=sigmoid)
            self.sigmoid_layers.append(sigmoid_layer)

            # construct rbm_layer
            if i == 0:
                rbm_layer = CRBM(input=layer_input,    # continuous-valued inputs
                                 n_visible=input_size,
                                 n_hidden=hidden_layer_sizes[i],
                                 W=sigmoid_layer.W,    # W, b are shared
                                 hbias=sigmoid_layer.b)
            else:
                rbm_layer = RBM(input=layer_input,
                                n_visible=input_size,
                                n_hidden=hidden_layer_sizes[i],
                                W=sigmoid_layer.W,     # W, b are shared
                                hbias=sigmoid_layer.b)

            self.rbm_layers.append(rbm_layer)

        # layer for output using Logistic Regression
        self.log_layer = LogisticRegression(input=self.sigmoid_layers[-1].sample_h_given_v(),
                                            label=self.y,
                                            n_in=hidden_layer_sizes[-1],
                                            n_out=n_outs)

        # finetune cost: the negative log likelihood of the logistic regression layer
        self.finetune_cost = self.log_layer.negative_log_likelihood()


def test_cdbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
              finetune_lr=0.1, finetune_epochs=200):

    x = numpy.array([[0.4, 0.5, 0.5, 0., 0., 0.],
                     [0.5, 0.3, 0.5, 0., 0., 0.],
                     [0.4, 0.5, 0.5, 0., 0., 0.],
                     [0., 0., 0.5, 0.3, 0.5, 0.],
                     [0., 0., 0.5, 0.4, 0.5, 0.],
                     [0., 0., 0.5, 0.5, 0.5, 0.]])

    y = numpy.array([[1, 0],
                     [1, 0],
                     [1, 0],
                     [0, 1],
                     [0, 1],
                     [0, 1]])

    rng = numpy.random.RandomState(123)

    # construct DBN
    dbn = CDBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[5, 5], n_outs=2, numpy_rng=rng)

    # pre-training (TrainUnsupervisedDBN)
    dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)

    # fine-tuning (DBNSupervisedFineTuning)
    dbn.finetune(lr=finetune_lr, epochs=finetune_epochs)

    # test
    x = numpy.array([[0.5, 0.5, 0., 0., 0., 0.],
                     [0., 0., 0., 0.5, 0.5, 0.],
                     [0.5, 0.5, 0.5, 0.5, 0.5, 0.]])

    print dbn.predict(x)


if __name__ == "__main__":
    test_cdbn()
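A minimal illustrative sketch (editor's addition, not part of this commit) of how the constructor above pairs layer sizes with RBM types for the test_cdbn() configuration: only the first layer, which sees the continuous-valued input, uses CRBM; deeper layers receive binary samples from the previous sigmoid layer via sample_h_given_v() and therefore use the plain RBM.

# Sketch only: mirrors the size/type bookkeeping in CDBN.__init__ above.
n_ins = 6
hidden_layer_sizes = [5, 5]

for i in range(len(hidden_layer_sizes)):
    input_size = n_ins if i == 0 else hidden_layer_sizes[i - 1]
    rbm_type = 'CRBM' if i == 0 else 'RBM'
    print('layer %d: %s(n_visible=%d, n_hidden=%d)'
          % (i, rbm_type, input_size, hidden_layer_sizes[i]))

# expected output:
# layer 0: CRBM(n_visible=6, n_hidden=5)
# layer 1: RBM(n_visible=5, n_hidden=5)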

CRBM.py

Lines changed: 73 additions & 0 deletions
@@ -0,0 +1,73 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
RBM w/ continuous-valued inputs (Linear Energy)

References :
  - Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy Layer-Wise
    Training of Deep Networks, Advances in Neural Information Processing
    Systems 19, 2007

"""

import sys
import numpy
from RBM import RBM
from utils import *


class CRBM(RBM):
    def propdown(self, h):
        pre_activation = numpy.dot(h, self.W.T) + self.vbias
        return pre_activation

    def sample_v_given_h(self, h0_sample):
        a_h = self.propdown(h0_sample)
        en = numpy.exp(-a_h)
        ep = numpy.exp(a_h)

        v1_mean = 1 / (1 - en) - 1 / a_h
        U = numpy.array(self.numpy_rng.uniform(
            low=0,
            high=1,
            size=v1_mean.shape))

        v1_sample = numpy.log((1 - U * (1 - ep))) / a_h

        return [v1_mean, v1_sample]


def test_crbm(learning_rate=0.1, k=1, training_epochs=1000):
    data = numpy.array([[0.4, 0.5, 0.5, 0., 0., 0.],
                        [0.5, 0.3, 0.5, 0., 0., 0.],
                        [0.4, 0.5, 0.5, 0., 0., 0.],
                        [0., 0., 0.5, 0.3, 0.5, 0.],
                        [0., 0., 0.5, 0.4, 0.5, 0.],
                        [0., 0., 0.5, 0.5, 0.5, 0.]])

    rng = numpy.random.RandomState(123)

    # construct CRBM
    rbm = CRBM(input=data, n_visible=6, n_hidden=5, numpy_rng=rng)

    # train
    for epoch in xrange(training_epochs):
        rbm.contrastive_divergence(lr=learning_rate, k=k)
        # cost = rbm.get_reconstruction_cross_entropy()
        # print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost

    # test
    v = numpy.array([[0.5, 0.5, 0., 0., 0., 0.],
                     [0., 0., 0., 0.5, 0.5, 0.]])

    print rbm.reconstruct(v)


if __name__ == "__main__":
    test_crbm()
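For readers checking the formulas in sample_v_given_h above: they correspond to a visible unit on [0, 1] with density proportional to exp(a*v), where a is the pre-activation from propdown(h) (the "linear energy" case). Under that reading, 1/(1 - exp(-a)) - 1/a is the closed-form mean and log(1 - U*(1 - exp(a)))/a is the inverse-CDF sampler for uniform U. A short standalone sanity check (editor's addition, not part of the commit; a_h = 1.5 is an arbitrary example activation):

# Sketch only: Monte-Carlo check of the formulas used in CRBM.sample_v_given_h.
import numpy

rng = numpy.random.RandomState(0)
a_h = 1.5                                   # arbitrary example pre-activation
mean_closed_form = 1 / (1 - numpy.exp(-a_h)) - 1 / a_h

U = rng.uniform(low=0, high=1, size=100000)
samples = numpy.log(1 - U * (1 - numpy.exp(a_h))) / a_h

print(mean_closed_form)                     # ~0.62 for a_h = 1.5
print(samples.mean())                       # should agree to ~2-3 decimals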

DeepBeliefNets.py renamed to DBN.py

Lines changed: 1 addition & 121 deletions
@@ -13,14 +13,13 @@
   - DeepLearningTutorials
     https://github.com/lisa-lab/DeepLearningTutorials
 
-
 '''
 
 import sys
 import numpy
 from HiddenLayer import HiddenLayer
 from LogisticRegression import LogisticRegression
-from RestrictedBoltzmannMachine import RBM, CRBM
+from RBM import RBM
 from utils import *
 
 
@@ -143,79 +142,6 @@ def predict(self, x):
         out = self.log_layer.predict(layer_input)
         return out
 
-'''
-DBN w/ continuous-valued inputs (Linear Energy)
-
-'''
-class CDBN(DBN):
-    def __init__(self, input=None, label=None,\
-                 n_ins=2, hidden_layer_sizes=[3, 3], n_outs=2,\
-                 numpy_rng=None):
-
-        self.x = input
-        self.y = label
-
-        self.sigmoid_layers = []
-        self.rbm_layers = []
-        self.n_layers = len(hidden_layer_sizes)  # = len(self.rbm_layers)
-
-        if numpy_rng is None:
-            numpy_rng = numpy.random.RandomState(1234)
-
-
-        assert self.n_layers > 0
-
-
-        # construct multi-layer
-        for i in xrange(self.n_layers):
-            # layer_size
-            if i == 0:
-                input_size = n_ins
-            else:
-                input_size = hidden_layer_sizes[i - 1]
-
-            # layer_input
-            if i == 0:
-                layer_input = self.x
-            else:
-                layer_input = self.sigmoid_layers[-1].sample_h_given_v()
-
-            # construct sigmoid_layer
-            sigmoid_layer = HiddenLayer(input=layer_input,
-                                        n_in=input_size,
-                                        n_out=hidden_layer_sizes[i],
-                                        numpy_rng=numpy_rng,
-                                        activation=sigmoid)
-            self.sigmoid_layers.append(sigmoid_layer)
-
-            # construct rbm_layer
-            if i == 0:
-                rbm_layer = CRBM(input=layer_input,    # continuous-valued inputs
-                                 n_visible=input_size,
-                                 n_hidden=hidden_layer_sizes[i],
-                                 W=sigmoid_layer.W,    # W, b are shared
-                                 hbias=sigmoid_layer.b)
-            else:
-                rbm_layer = RBM(input=layer_input,
-                                n_visible=input_size,
-                                n_hidden=hidden_layer_sizes[i],
-                                W=sigmoid_layer.W,     # W, b are shared
-                                hbias=sigmoid_layer.b)
-
-            self.rbm_layers.append(rbm_layer)
-
-
-        # layer for output using Logistic Regression
-        self.log_layer = LogisticRegression(input=self.sigmoid_layers[-1].sample_h_given_v(),
-                                            label=self.y,
-                                            n_in=hidden_layer_sizes[-1],
-                                            n_out=n_outs)
-
-        # finetune cost: the negative log likelihood of the logistic regression layer
-        self.finetune_cost = self.log_layer.negative_log_likelihood()
-
-
-
 
 
 def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
@@ -256,51 +182,5 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
 
 
 
-def test_cdbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
-              finetune_lr=0.1, finetune_epochs=200):
-
-    x = numpy.array([[0.4, 0.5, 0.5, 0., 0., 0.],
-                     [0.5, 0.3, 0.5, 0., 0., 0.],
-                     [0.4, 0.5, 0.5, 0., 0., 0.],
-                     [0., 0., 0.5, 0.3, 0.5, 0.],
-                     [0., 0., 0.5, 0.4, 0.5, 0.],
-                     [0., 0., 0.5, 0.5, 0.5, 0.]])
-
-    y = numpy.array([[1, 0],
-                     [1, 0],
-                     [1, 0],
-                     [0, 1],
-                     [0, 1],
-                     [0, 1]])
-
-
-    rng = numpy.random.RandomState(123)
-
-    # construct DBN
-    dbn = CDBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[5, 5], n_outs=2, numpy_rng=rng)
-
-    # pre-training (TrainUnsupervisedDBN)
-    dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)
-
-    # fine-tuning (DBNSupervisedFineTuning)
-    dbn.finetune(lr=finetune_lr, epochs=finetune_epochs)
-
-
-    # test
-    x = numpy.array([[0.5, 0.5, 0., 0., 0., 0.],
-                     [0., 0., 0., 0.5, 0.5, 0.],
-                     [0.5, 0.5, 0.5, 0.5, 0.5, 0.]])
-
-
-    print dbn.predict(x)
-
-
-
-
 if __name__ == "__main__":
-    print >> sys.stderr, 'DBN'
     test_dbn()
-
-    # print >> sys.stderr
-    # print >> sys.stderr, 'CDBN'
-    # test_cdbn()

HiddenLayer.py

Lines changed: 0 additions & 2 deletions
@@ -4,12 +4,10 @@
 '''
 Hidden Layer
 
-
 References :
  - DeepLearningTutorials
    https://github.com/lisa-lab/DeepLearningTutorials
 
-
 '''
 
 import sys

LogisticRegression.py

Lines changed: 0 additions & 1 deletion
@@ -11,7 +11,6 @@
  - DeepLearningTutorials
    https://github.com/lisa-lab/DeepLearningTutorials
 
-
 '''
 
 import sys

0 commit comments