Skip to content

Commit 88cc26d

Browse files
authored
Ignore unexpected weights from PT conversion (huggingface#10397)
1 parent 63645b3 commit 88cc26d

File tree

1 file changed: +5 additions, −1 deletion

src/transformers/models/bert/modeling_tf_bert.py

Lines changed: 5 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -919,7 +919,11 @@ def serving_output(self, output: TFBaseModelOutputWithPooling) -> TFBaseModelOut
919919
)
920920
class TFBertForPreTraining(TFBertPreTrainedModel, TFBertPreTrainingLoss):
921921
# names with a '.' represents the authorized unexpected/missing layers when a TF model is loaded from a PT model
922-
_keys_to_ignore_on_load_unexpected = [r"cls.predictions.decoder.weight"]
922+
_keys_to_ignore_on_load_unexpected = [
923+
r"position_ids",
924+
r"cls.predictions.decoder.weight",
925+
r"cls.predictions.decoder.bias",
926+
]
923927

924928
def __init__(self, config: BertConfig, *inputs, **kwargs):
925929
super().__init__(config, *inputs, **kwargs)

0 commit comments

Comments
 (0)