
Commit 3012646

CRBM

Yusuke Sugomori committed · 1 parent f35193b · commit 3012646

2 files changed: 85 additions & 24 deletions

RestrictedBoltzmannMachine.py

Lines changed: 75 additions & 17 deletions
@@ -61,17 +61,17 @@ def contrastive_divergence(self, lr=0.1, k=1, input=None):
         self.input = input

         ''' CD-k '''
-        pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)
+        ph_mean, ph_sample = self.sample_h_given_v(self.input)

         chain_start = ph_sample

         for step in xrange(k):
             if step == 0:
-                pre_sigmoid_nvs, nv_means, nv_samples,\
-                pre_sigmoid_nhs, nh_means, nh_samples = self.gibbs_hvh(chain_start)
+                nv_means, nv_samples,\
+                nh_means, nh_samples = self.gibbs_hvh(chain_start)
             else:
-                pre_sigmoid_nvs, nv_means, nv_samples,\
-                pre_sigmoid_nhs, nh_means, nh_samples = self.gibbs_hvh(nh_samples)
+                nv_means, nv_samples,\
+                nh_means, nh_samples = self.gibbs_hvh(nh_samples)

         # chain_end = nv_samples

@@ -86,37 +86,37 @@ def contrastive_divergence(self, lr=0.1, k=1, input=None):


     def sample_h_given_v(self, v0_sample):
-        pre_sigmoid_h1, h1_mean = self.propup(v0_sample)
+        h1_mean = self.propup(v0_sample)
         h1_sample = self.numpy_rng.binomial(size=h1_mean.shape, # discrete: binomial
                                             n=1,
                                             p=h1_mean)

-        return [pre_sigmoid_h1, h1_mean, h1_sample]
+        return [h1_mean, h1_sample]


     def sample_v_given_h(self, h0_sample):
-        pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)
+        v1_mean = self.propdown(h0_sample)
         v1_sample = self.numpy_rng.binomial(size=v1_mean.shape, # discrete: binomial
                                             n=1,
                                             p=v1_mean)

-        return [pre_sigmoid_v1, v1_mean, v1_sample]
+        return [v1_mean, v1_sample]

     def propup(self, v):
         pre_sigmoid_activation = numpy.dot(v, self.W) + self.hbias
-        return [pre_sigmoid_activation, sigmoid(pre_sigmoid_activation)]
+        return sigmoid(pre_sigmoid_activation)

     def propdown(self, h):
         pre_sigmoid_activation = numpy.dot(h, self.W.T) + self.vbias
-        return [pre_sigmoid_activation, sigmoid(pre_sigmoid_activation)]
+        return sigmoid(pre_sigmoid_activation)


     def gibbs_hvh(self, h0_sample):
-        pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
-        pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
+        v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
+        h1_mean, h1_sample = self.sample_h_given_v(v1_sample)

-        return [pre_sigmoid_v1, v1_mean, v1_sample,
-                pre_sigmoid_h1, h1_mean, h1_sample]
+        return [v1_mean, v1_sample,
+                h1_mean, h1_sample]


     def get_reconstruction_cross_entropy(self):
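
Note: after this refactor, propup/propdown return only the post-sigmoid mean and the sampling helpers return [mean, sample] pairs, so callers no longer unpack pre-sigmoid activations. A minimal standalone sketch of the new hidden-unit sampling contract (plain numpy; the toy W, hbias, and v0 are illustrative and not part of the commit):

    import numpy

    def sigmoid(x):
        return 1. / (1 + numpy.exp(-x))

    rng = numpy.random.RandomState(123)
    W = rng.uniform(-1, 1, size=(6, 3))    # toy weights: 6 visible, 3 hidden units
    hbias = numpy.zeros(3)
    v0 = numpy.array([1, 1, 1, 0, 0, 0])

    h1_mean = sigmoid(numpy.dot(v0, W) + hbias)                    # what propup returns now
    h1_sample = rng.binomial(size=h1_mean.shape, n=1, p=h1_mean)   # sample_h_given_v -> [h1_mean, h1_sample]
    print(h1_sample)
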
@@ -139,6 +139,35 @@ def reconstruct(self, v):
         return reconstructed_v


+'''
+RBM w/ continuous-valued inputs (Linear Energy)
+'''
+class CRBM(RBM):
+    def propdown(self, h):
+        pre_activation = numpy.dot(h, self.W.T) + self.vbias
+        return pre_activation
+
+
+
+    def sample_v_given_h(self, h0_sample):
+        a_h = self.propdown(h0_sample)
+        en = numpy.exp(-a_h)
+        ep = numpy.exp(a_h)
+
+        v1_mean = 1 / (1 - en) - 1 / a_h
+        U = numpy.array(self.numpy_rng.uniform(
+            low=0,
+            high=1,
+            size=v1_mean.shape))
+
+        v1_sample = numpy.log((1 - U * (1 - ep))) / a_h
+
+
+        return [v1_mean, v1_sample]
+
+
+
+
 def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):
     data = numpy.array([[1,1,1,0,0,0],
                         [1,0,1,0,0,0],
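
Note: CRBM.sample_v_given_h treats each visible unit as continuous on [0, 1] with a linear energy, i.e. a conditional density proportional to exp(a * v), where a is the pre-activation returned by propdown. The mean of that density is 1/(1 - exp(-a)) - 1/a, and inverse-CDF sampling with U ~ Uniform(0, 1) gives v = log(1 - U * (1 - exp(a))) / a, which is exactly what the code computes. (a = 0 is a singular point of both expressions; the commit relies on numpy.seterr(all='ignore') in utils.py rather than special-casing it.) A small standalone check of the two formulas against each other (the activation 2.5 and the sample count are arbitrary choices, not from the commit):

    import numpy

    rng = numpy.random.RandomState(0)
    a = 2.5                                        # illustrative pre-activation
    U = rng.uniform(low=0, high=1, size=100000)

    samples = numpy.log(1 - U * (1 - numpy.exp(a))) / a    # inverse-CDF draws, as in CRBM
    closed_form_mean = 1 / (1 - numpy.exp(-a)) - 1 / a     # the expression used for v1_mean

    print(samples.mean())       # empirical mean of the draws
    print(closed_form_mean)     # should agree closely (about 0.69 for a = 2.5)
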
@@ -148,7 +177,6 @@ def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):
                         [0,0,1,1,1,0]])


-
     rng = numpy.random.RandomState(123)

     # construct RBM
@@ -168,5 +196,35 @@ def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):
     print rbm.reconstruct(v)


+
+def test_crbm(learning_rate=0.1, k=1, training_epochs=1000):
+    data = numpy.array([[0.4, 0.5, 0.5, 0., 0., 0.],
+                        [0.5, 0.3, 0.5, 0., 0., 0.],
+                        [0.4, 0.5, 0.5, 0., 0., 0.],
+                        [0., 0., 0.5, 0.3, 0.5, 0.],
+                        [0., 0., 0.5, 0.4, 0.5, 0.],
+                        [0., 0., 0.5, 0.5, 0.5, 0.]])
+
+
+    rng = numpy.random.RandomState(123)
+
+    # construct CRBM
+    rbm = CRBM(input=data, n_visible=6, n_hidden=5, numpy_rng=rng)
+
+    # train
+    for epoch in xrange(training_epochs):
+        rbm.contrastive_divergence(lr=learning_rate, k=k)
+        cost = rbm.get_reconstruction_cross_entropy()
+        print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
+
+
+    # test
+    v = numpy.array([[0.5, 0.5, 0., 0., 0., 0.],
+                     [0., 0., 0., 0.5, 0.5, 0.]])
+
+    print rbm.reconstruct(v)
+
+
 if __name__ == "__main__":
-    test_rbm()
+    # test_rbm()
+    test_crbm()

utils.py

Lines changed: 10 additions & 7 deletions
@@ -1,23 +1,26 @@
 ''' '''
 import numpy

+
 numpy.seterr(all='ignore')


 def sigmoid(x):
-    # numpy.seterrcall(sigmoid_err_handler) # overflow handling
-    # numpy.seterr(all='call')
     return 1. / (1 + numpy.exp(-x))


-def sigmoid_err_handler(type, flg):
-    # Log
-    return
-
-
 def softmax(x):
     e = numpy.exp(x - numpy.max(x)) # prevent overflow
     if e.ndim == 1:
         return e / numpy.sum(e, axis=0)
     else:
         return e / numpy.array([numpy.sum(e, axis=1)]).T # ndim = 2
+
+
+# # probability density for the Gaussian dist
+# def gaussian(x, mean=0.0, scale=1.0):
+#     s = 2 * numpy.power(scale, 2)
+#     e = numpy.exp( - numpy.power((x - mean), 2) / s )
+
+#     return e / numpy.square(numpy.pi * s)
+
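
Note: the Gaussian helper is added only as a comment and is not referenced anywhere yet. For reference, a normalized density under the same mean/scale parameterization would divide by a square root rather than a square; a minimal sketch (an assumption about the intended helper, not part of the commit):

    import numpy

    def gaussian(x, mean=0.0, scale=1.0):
        # density of N(mean, scale^2); with s = 2 * scale**2 the normalizer is sqrt(pi * s)
        s = 2 * numpy.power(scale, 2)
        e = numpy.exp(-numpy.power(x - mean, 2) / s)
        return e / numpy.sqrt(numpy.pi * s)
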
