From a0117c9bcec3809f54576df3ff8a1c28ec672b49 Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Wed, 2 Apr 2025 20:35:29 +0700
Subject: [PATCH] fix: separate gemma3 text and vision example config (#2471)
 [skip ci]

* fix: separate gemma3 text and vision example config

* fix: update to use a text-only dataset

* fix: typo
---
 examples/gemma3/gemma-3-4b-qlora.yml          | 66 +++++++++++++++++++
 ...b-lora.yml => gemma-3-4b-vision-qlora.yml} |  2 +-
 2 files changed, 67 insertions(+), 1 deletion(-)
 create mode 100644 examples/gemma3/gemma-3-4b-qlora.yml
 rename examples/gemma3/{gemma-3-4b-lora.yml => gemma-3-4b-vision-qlora.yml} (98%)

diff --git a/examples/gemma3/gemma-3-4b-qlora.yml b/examples/gemma3/gemma-3-4b-qlora.yml
new file mode 100644
index 000000000..28b7bdacf
--- /dev/null
+++ b/examples/gemma3/gemma-3-4b-qlora.yml
@@ -0,0 +1,66 @@
+base_model: google/gemma-3-4b-it
+strict: false
+
+# gemma3 doesn't seem to play nice with ddp
+ddp_find_unused_parameters: true
+
+chat_template: gemma3
+datasets:
+  - path: cgato/SlimOrcaDedupCleaned
+    type: chat_template
+    field_messages: conversations
+    message_property_mappings:
+      role: from
+      content: value
+
+dataset_prepared_path: last_run_prepared
+val_set_size: 0.01
+output_dir: ./outputs/out
+
+adapter: lora
+lora_model_dir:
+
+sequence_len: 2048
+sample_packing: true
+pad_to_sequence_len: true
+
+lora_r: 32
+lora_alpha: 16
+lora_dropout: 0.05
+lora_target_modules: 'language_model.model.layers.[\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj'
+
+wandb_project:
+wandb_entity:
+wandb_watch:
+wandb_name:
+wandb_log_model:
+
+gradient_accumulation_steps: 4
+micro_batch_size: 2
+num_epochs: 1
+optimizer: adamw_bnb_8bit
+lr_scheduler: cosine
+learning_rate: 0.0002
+
+train_on_inputs: false
+group_by_length: false
+bf16: true
+fp16:
+tf32: true
+
+gradient_checkpointing: true
+gradient_checkpointing_kwargs:
+  use_reentrant: false
+local_rank:
+logging_steps: 1
+flash_attention: true
+eager_attention:
+
+warmup_ratio: 0.1
+evals_per_epoch: 1
+saves_per_epoch: 1
+debug:
+deepspeed:
+weight_decay: 0.0
+fsdp:
+fsdp_config:
diff --git a/examples/gemma3/gemma-3-4b-lora.yml b/examples/gemma3/gemma-3-4b-vision-qlora.yml
similarity index 98%
rename from examples/gemma3/gemma-3-4b-lora.yml
rename to examples/gemma3/gemma-3-4b-vision-qlora.yml
index 0e7422bd4..d51dd88b1 100644
--- a/examples/gemma3/gemma-3-4b-lora.yml
+++ b/examples/gemma3/gemma-3-4b-vision-qlora.yml
@@ -20,7 +20,7 @@ dataset_prepared_path: last_run_prepared
 val_set_size: 0.01
 output_dir: ./outputs/out
 
-adapter: lora
+adapter: qlora
 lora_model_dir:
 
 sequence_len: 2048