don't resize embeddings if it's already large enough (#577)
* don't resize embeddings if it's already large enough * make sure to tie weights, even if we aren't resizing
This commit is contained in:
@@ -302,7 +302,10 @@ def load_model(
         if cfg.resize_token_embeddings_to_32x
         else len(tokenizer)
     )
-    model.resize_token_embeddings(embeddings_len)
+    if model.get_input_embeddings().num_embeddings < embeddings_len:
+        model.resize_token_embeddings(embeddings_len)
+    else:
+        model.tie_weights()

     if (
         hasattr(model.config, "max_position_embeddings")
|
||||
Reference in New Issue
Block a user