Skip to content

Commit d3f1b6e

Browse files
committed
fix config
1 parent 48ee4b1 commit d3f1b6e

File tree

3 files changed

+6
-4
lines changed

3 files changed

+6
-4
lines changed

neural_ner/config.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,8 @@ class Config(object):
     config.is_l2_loss = False

-    config.model_name = 'model.NER_SOFTMAX_CHAR'
-    config.optimizer = 'sgd'
+    config.model_name = 'model.NER_SOFTMAX_CHAR_CRF'
+    config.optimizer = 'sgd_mom'

     config.use_pretrain_embd = True

neural_ner/model.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -130,8 +130,8 @@ def forward(self, batch):
         return emissions

     def get_loss(self, logits, y, s_lens):
-        #loss = -1 * self.crf.log_likelihood(logits, y)
-        loss = self.crf.structural_perceptron_loss(logits, y)
+        loss = -1 * self.crf.log_likelihood(logits, y)
+        #loss = self.crf.structural_perceptron_loss(logits, y)
         loss = loss / s_lens.float()
         loss = loss.mean()
         if self.config.is_l2_loss:

neural_ner/model_utils.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@ def get_optimizer(model, config):
     if config.optimizer == 'adam':
         optimizer = Adam(params, amsgrad=True)
     elif config.optimizer == 'sgd':
+        optimizer = SGD(params, lr=0.01)
+    elif config.optimizer == 'sgd_mom':
         optimizer = SGD(params, lr=0.01, momentum=0.9)

     num_params = sum(p.numel() for p in params)

0 commit comments

Comments
 (0)