@@ -80,6 +80,9 @@ def _p(pp, name):
 
 
 def init_params(options):
+    """
+    Global (not LSTM) parameters. For the embedding and the classifier.
+    """
     params = OrderedDict()
     # embedding
     randn = numpy.random.rand(options['n_words'],
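For readers following along: the hunk cuts off mid-statement. A minimal self-contained sketch of what `init_params` does in this tutorial (the `Wemb`/`U`/`b` names, the 0.01 scaling, and `ydim` are assumptions based on the surrounding code, not shown in this diff):

```python
import numpy
from collections import OrderedDict

def init_params_sketch(options):
    # Non-LSTM parameters: word embedding plus the final classifier.
    params = OrderedDict()
    # Embedding: one row of size dim_proj per vocabulary word, scaled
    # down so early gradients stay small. (Names are assumptions.)
    randn = numpy.random.rand(options['n_words'], options['dim_proj'])
    params['Wemb'] = (0.01 * randn).astype('float32')
    # Classifier: maps the pooled LSTM state to ydim output classes.
    params['U'] = (0.01 * numpy.random.randn(options['dim_proj'],
                                             options['ydim'])).astype('float32')
    params['b'] = numpy.zeros((options['ydim'],), dtype='float32')
    return params
```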
@@ -125,6 +128,11 @@ def ortho_weight(ndim):
 
 
 def param_init_lstm(options, params, prefix='lstm'):
+    """
+    Init the LSTM parameters:
+
+    :see: init_params
+    """
     W = numpy.concatenate([ortho_weight(options['dim_proj']),
                            ortho_weight(options['dim_proj']),
                            ortho_weight(options['dim_proj']),
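The concatenated `ortho_weight` blocks correspond to the LSTM gates (input, forget, output, cell candidate), stacked side by side into a single weight matrix of shape `(dim_proj, 4 * dim_proj)`. `ortho_weight` itself is not shown in this hunk; a sketch of the usual SVD-based orthogonal initialization (treat the body as an assumption):

```python
import numpy

def ortho_weight(ndim):
    # Take the left singular vectors of a random Gaussian matrix;
    # the result is orthogonal, which helps keep the recurrent
    # transition well-conditioned early in training.
    W = numpy.random.randn(ndim, ndim)
    u, s, v = numpy.linalg.svd(W)
    return u.astype('float32')
```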
@@ -388,6 +396,7 @@ def test_lstm(
     noise_std=0.,
     use_dropout=True,  # if False slightly faster, but worse test error
                        # This frequently needs a bigger model.
+    reload_model="",  # Path to a saved model we want to start from.
 ):
 
     # Model options
@@ -407,6 +416,9 @@ def test_lstm(
     # Dict name (string) -> numpy ndarray
     params = init_params(model_options)
 
+    if reload_model:
+        load_params(reload_model, params)
+
     # This creates Theano Shared Variables from the parameters.
     # Dict name (string) -> Theano Tensor Shared Variable
     # params and tparams have different copies of the weights.
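`load_params` is called here but not shown in the diff. A minimal sketch of a compatible implementation, assuming the model was saved as a flat `.npz` archive keyed by parameter name:

```python
import numpy

def load_params(path, params):
    # Overwrite each freshly initialized parameter with its saved
    # value, so training resumes from the archived weights.
    pp = numpy.load(path)
    for kk in params:
        if kk not in pp.files:
            raise KeyError('%s is not in the archive %s' % (kk, path))
        params[kk] = pp[kk]
    return params
```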
@@ -561,4 +573,7 @@ def test_lstm(
     theano.config.scan.allow_gc = False
 
     # See function train for all possible parameters and their definitions.
-    test_lstm(max_epochs=10)
+    test_lstm(
+        # reload_model="lstm_model.npz",
+        max_epochs=10,
+    )
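Usage note: the commented `reload_model` line only makes sense once a saved archive exists. A plausible two-step workflow (assuming the tutorial saves its best parameters to `lstm_model.npz`; the save path is not shown in this diff):

```python
# First run: train from scratch; the tutorial saves the best
# parameters with numpy.savez (path assumed to be lstm_model.npz).
test_lstm(max_epochs=10)

# Later run: warm-start from the saved weights instead of
# re-initializing randomly.
test_lstm(
    reload_model="lstm_model.npz",
    max_epochs=10,
)
```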