don't resize embeddings if they're already large enough (#577)

* don't resize embeddings if they're already large enough

* make sure to tie weights, even if we aren't resizing
This commit is contained in:
Wing Lian
2023-09-15 15:47:09 -04:00
committed by GitHub
parent 12a2dbbc2c
commit 360788296a

View File

@@ -302,7 +302,10 @@ def load_model(
if cfg.resize_token_embeddings_to_32x
else len(tokenizer)
)
model.resize_token_embeddings(embeddings_len)
if model.get_input_embeddings().num_embeddings < embeddings_len:
model.resize_token_embeddings(embeddings_len)
else:
model.tie_weights()
if (
hasattr(model.config, "max_position_embeddings")