Skip to content

Commit 8e7963e

Browse files
committed
Pep8 / pyflakes
1 parent 0256450 commit 8e7963e

File tree

1 file changed

+30
-25
lines changed

1 file changed

+30
-25
lines changed

code/rnnslu.py

Lines changed: 30 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ def shuffle(lol, seed):
3030
random.seed(seed)
3131
random.shuffle(l)
3232

33+
3334
# start-snippet-1
3435
def contextwin(l, win):
3536
'''
@@ -45,13 +46,14 @@ def contextwin(l, win):
4546
assert win >= 1
4647
l = list(l)
4748

48-
lpadded = win//2 * [-1] + l + win//2 * [-1]
49-
out = [lpadded[i:i+win] for i in range(len(l))]
49+
lpadded = win // 2 * [-1] + l + win // 2 * [-1]
50+
out = [lpadded[i:(i + win)] for i in range(len(l))]
5051

5152
assert len(out) == len(l)
5253
return out
5354
# end-snippet-1
5455

56+
5557
# data loading functions
5658
def atisfold(fold):
5759
assert fold in range(5)
@@ -125,6 +127,7 @@ def get_perf(filename):
125127

126128
return {'p': precision, 'r': recall, 'f1': f1score}
127129

130+
128131
# start-snippet-2
129132
class RNNSLU(object):
130133
''' elman neural net model '''
@@ -199,9 +202,9 @@ def recurrence(x_t, h_tm1):
199202
[T.arange(x.shape[0]), y_sentence])
200203
sentence_gradients = T.grad(sentence_nll, self.params)
201204
sentence_updates = OrderedDict((p, p - lr*g)
202-
# end-snippet-5
203205
for p, g in
204206
zip(self.params, sentence_gradients))
207+
# end-snippet-5
205208

206209
# theano functions to compile
207210
# start-snippet-6
@@ -238,28 +241,28 @@ def load(self, folder):
238241
param.name + '.npy')))
239242

240243

241-
242244
def main(param=None):
243245
if not param:
244-
param = {'fold': 3,
245-
# 5 folds 0,1,2,3,4
246-
'data': 'atis',
247-
'lr': 0.0970806646812754,
248-
'verbose': 1,
249-
'decay': True,
250-
# decay on the learning rate if improvement stops
251-
'win': 7,
252-
# number of words in the context window
253-
'nhidden': 200,
254-
# number of hidden units
255-
'seed': 345,
256-
'emb_dimension': 50,
257-
# dimension of word embedding
258-
'nepochs': 60,
259-
# 60 is recommended
260-
'savemodel': False}
246+
param = {
247+
'fold': 3,
248+
# 5 folds 0,1,2,3,4
249+
'data': 'atis',
250+
'lr': 0.0970806646812754,
251+
'verbose': 1,
252+
'decay': True,
253+
# decay on the learning rate if improvement stops
254+
'win': 7,
255+
# number of words in the context window
256+
'nhidden': 200,
257+
# number of hidden units
258+
'seed': 345,
259+
'emb_dimension': 50,
260+
# dimension of word embedding
261+
'nepochs': 60,
262+
# 60 is recommended
263+
'savemodel': False}
261264
print param
262-
265+
263266
folder = os.path.basename(__file__).split('.')[0]
264267
if not os.path.exists(folder):
265268
os.mkdir(folder)
@@ -308,9 +311,11 @@ def main(param=None):
308311

309312
for i, (x, y) in enumerate(zip(train_lex, train_y)):
310313
rnn.train(x, y, param['win'], param['clr'])
311-
print '[learning] epoch %i >> %2.2f%%'%(e,(i+1)*100./nsentences),'completed in %.2f (sec) <<\r'%(time.time()-tic),
314+
print '[learning] epoch %i >> %2.2f%%' % (
315+
e, (i + 1) * 100. / nsentences),
316+
print 'completed in %.2f (sec) <<\r' % (time.time() - tic),
312317
sys.stdout.flush()
313-
318+
314319
# evaluation // back into the real world : idx -> words
315320
predictions_test = [map(lambda x: idx2label[x],
316321
rnn.classify(numpy.asarray(
@@ -330,7 +335,7 @@ def main(param=None):
330335
groundtruth_valid,
331336
words_valid,
332337
folder + '/current.valid.txt')
333-
338+
334339
if res_valid['f1'] > best_f1:
335340

336341
if param['savemodel']:

0 commit comments

Comments (0)