feat: add support for qwen2.5 vl for multimodal

This commit is contained in:
NanoCode012
2025-02-18 12:42:29 +07:00
parent 2de866e92f
commit fbf3ca86c9

View File

@@ -132,7 +132,7 @@ def normalize_config(cfg):
     cfg.is_multimodal = (
         hasattr(model_config, "model_type")
-        and model_config.model_type in ["llava", "mllama", "qwen2_vl"]
+        and model_config.model_type in ["llava", "mllama", "qwen2_vl", "qwen2_5_vl"]
         or any(
             multimodal_name in cfg.base_model.lower()
             for multimodal_name in [