Fix(model loading): Warn when model revision is passed to gptq (#364)

* fix(model loading): warn when model revision is passed to gptq

* chore: improve message
This commit is contained in:
NanoCode012
2023-08-13 01:16:59 +09:00
committed by GitHub
parent e37d9358e6
commit 96bd6ae1c4

View File

@@ -97,6 +97,13 @@ def validate_config(cfg):
"push_to_hub_model_id is deprecated. Please use hub_model_id instead."
)
+    if cfg.gptq and cfg.model_revision:
+        raise ValueError(
+            "model_revision is not supported for GPTQ models. "
+            "Please download the model from HuggingFace Hub manually for correct branch, "
+            "point to its path, and remove model_revision from the config."
+        )
# TODO
# MPT 7b
# https://github.com/facebookresearch/bitsandbytes/issues/25