fix(config): missing config not being documented and fix model_ override (#2317)

* fix(config): missing config not being documented and fix model_ namespace override

* fix: delete redundant field
This commit is contained in:
NanoCode012
2025-02-08 18:01:48 +07:00
committed by GitHub
parent fd8cb32547
commit 526e5ee8b8
3 changed files with 9 additions and 4 deletions

View File

@@ -46,6 +46,10 @@ overrides_of_model_config:
type: # linear | dynamic
factor: # float
# optional overrides the base model loading from_pretrained
overrides_of_model_kwargs:
# use_cache: False
# optional overrides to the bnb 4bit quantization configuration
# https://huggingface.co/docs/transformers/main/main_classes/quantization#transformers.BitsAndBytesConfig
bnb_config_kwargs:

View File

@@ -115,6 +115,9 @@ class RemappedParameters(BaseModel):
overrides_of_model_config: Optional[Dict[str, Any]] = Field(
default=None, alias="model_config"
)
overrides_of_model_kwargs: Optional[Dict[str, Any]] = Field(
default=None, alias="model_kwargs"
)
type_of_model: Optional[str] = Field(default=None, alias="model_type")
revision_of_model: Optional[str] = Field(default=None, alias="model_revision")
@@ -426,8 +429,6 @@ class ModelInputConfig(BaseModel):
)
trust_remote_code: Optional[bool] = None
model_kwargs: Optional[Dict[str, Any]] = None
@field_validator("trust_remote_code")
@classmethod
def hint_trust_remote_code(cls, trust_remote_code):

View File

@@ -357,8 +357,8 @@ class ModelLoader:
# init model kwargs
self.model_kwargs: Dict[str, Any] = {}
if cfg.model_kwargs:
for key, val in cfg.model_kwargs.items():
if cfg.overrides_of_model_kwargs:
for key, val in cfg.overrides_of_model_kwargs.items():
self.model_kwargs[key] = val
# init model