handle load_model splat
This commit is contained in:
@@ -2,6 +2,7 @@ base_model: mistralai/Mistral-7B-v0.1
 model_type: MistralForCausalLM
 tokenizer_type: LlamaTokenizer
 is_mistral_derived_model: true
+multimodal: true

 vision_tower: openai/clip-vit-large-patch14
 tune_mm_mlp_adapter: true
@@ -43,7 +43,7 @@ def do_cli(config: Path = Path("examples/"), **kwargs):
     parsed_cfg.dataset_prepared_path = DEFAULT_DATASET_PREPARED_PATH

     tokenizer = load_tokenizer(parsed_cfg)
-    model = load_model(parsed_cfg, tokenizer)
+    model, _ = load_model(parsed_cfg, tokenizer)
     dataset_meta = load_mm_dataset(
         cfg=parsed_cfg, cli_args=parsed_cli_args, model=model
     )
Reference in New Issue
Block a user