Built site for gh-pages

This commit is contained in:
Quarto GHA Workflow Runner
2025-06-12 17:25:50 +00:00
parent f465e840cc
commit eac3a4860e
8 changed files with 501 additions and 702 deletions

View File

@@ -29,12 +29,12 @@ jobs:
- cuda: 126
cuda_version: 12.6.3
python_version: "3.11"
pytorch: 2.7.0
pytorch: 2.7.1
axolotl_extras:
- cuda: 128
cuda_version: 12.8.1
python_version: "3.11"
pytorch: 2.7.0
pytorch: 2.7.1
axolotl_extras:
runs-on: axolotl-gpu-runner
steps:
@@ -97,12 +97,12 @@ jobs:
- cuda: 126
cuda_version: 12.6.3
python_version: "3.11"
pytorch: 2.7.0
pytorch: 2.7.1
axolotl_extras:
- cuda: 128
cuda_version: 12.8.1
python_version: "3.11"
pytorch: 2.7.0
pytorch: 2.7.1
axolotl_extras:
runs-on: axolotl-gpu-runner
steps:

View File

@@ -1 +1 @@
91cd29d0
72083c85

View File

@@ -547,43 +547,44 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb1-10"><a href="#cb1-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb1-11"><a href="#cb1-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb1-12"><a href="#cb1-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb1-13"><a href="#cb1-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-14"><a href="#cb1-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-15"><a href="#cb1-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-16"><a href="#cb1-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb1-17"><a href="#cb1-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb1-18"><a href="#cb1-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb1-19"><a href="#cb1-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-20"><a href="#cb1-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-21"><a href="#cb1-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-22"><a href="#cb1-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb1-23"><a href="#cb1-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-24"><a href="#cb1-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-25"><a href="#cb1-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-26"><a href="#cb1-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-27"><a href="#cb1-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb1-28"><a href="#cb1-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-29"><a href="#cb1-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-30"><a href="#cb1-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-31"><a href="#cb1-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-32"><a href="#cb1-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-33"><a href="#cb1-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-34"><a href="#cb1-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-35"><a href="#cb1-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-36"><a href="#cb1-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-37"><a href="#cb1-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-38"><a href="#cb1-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-39"><a href="#cb1-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-40"><a href="#cb1-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb1-41"><a href="#cb1-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb1-42"><a href="#cb1-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-43"><a href="#cb1-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-44"><a href="#cb1-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-45"><a href="#cb1-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-46"><a href="#cb1-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-47"><a href="#cb1-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-48"><a href="#cb1-48" aria-hidden="true" tabindex="-1"></a> simpo_gamma<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-49"><a href="#cb1-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb1-13"><a href="#cb1-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-14"><a href="#cb1-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-15"><a href="#cb1-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-16"><a href="#cb1-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-17"><a href="#cb1-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb1-18"><a href="#cb1-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb1-19"><a href="#cb1-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb1-20"><a href="#cb1-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-21"><a href="#cb1-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-22"><a href="#cb1-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-23"><a href="#cb1-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb1-24"><a href="#cb1-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-25"><a href="#cb1-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-26"><a href="#cb1-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-27"><a href="#cb1-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-28"><a href="#cb1-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb1-29"><a href="#cb1-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-30"><a href="#cb1-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-31"><a href="#cb1-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-32"><a href="#cb1-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-33"><a href="#cb1-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-34"><a href="#cb1-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-35"><a href="#cb1-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-36"><a href="#cb1-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-37"><a href="#cb1-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-38"><a href="#cb1-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-39"><a href="#cb1-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-40"><a href="#cb1-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-41"><a href="#cb1-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb1-42"><a href="#cb1-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb1-43"><a href="#cb1-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-44"><a href="#cb1-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-45"><a href="#cb1-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-46"><a href="#cb1-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-47"><a href="#cb1-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-48"><a href="#cb1-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-49"><a href="#cb1-49" aria-hidden="true" tabindex="-1"></a> simpo_gamma<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb1-50"><a href="#cb1-50" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>CPO config for CPO training</p>
</section>
<section id="axolotl.core.training_args.AxolotlKTOConfig" class="level3">
@@ -600,42 +601,43 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb2-10"><a href="#cb2-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb2-11"><a href="#cb2-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb2-12"><a href="#cb2-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb2-13"><a href="#cb2-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-14"><a href="#cb2-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-15"><a href="#cb2-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-16"><a href="#cb2-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb2-17"><a href="#cb2-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb2-18"><a href="#cb2-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb2-19"><a href="#cb2-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-20"><a href="#cb2-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-21"><a href="#cb2-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-22"><a href="#cb2-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb2-23"><a href="#cb2-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-24"><a href="#cb2-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-25"><a href="#cb2-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-26"><a href="#cb2-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-27"><a href="#cb2-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb2-28"><a href="#cb2-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-29"><a href="#cb2-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-30"><a href="#cb2-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-31"><a href="#cb2-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-32"><a href="#cb2-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-33"><a href="#cb2-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-34"><a href="#cb2-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-35"><a href="#cb2-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-36"><a href="#cb2-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-37"><a href="#cb2-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-38"><a href="#cb2-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-39"><a href="#cb2-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-40"><a href="#cb2-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb2-41"><a href="#cb2-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb2-42"><a href="#cb2-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-43"><a href="#cb2-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-44"><a href="#cb2-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-45"><a href="#cb2-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-46"><a href="#cb2-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-47"><a href="#cb2-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-48"><a href="#cb2-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb2-13"><a href="#cb2-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-14"><a href="#cb2-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-15"><a href="#cb2-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-16"><a href="#cb2-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-17"><a href="#cb2-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb2-18"><a href="#cb2-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb2-19"><a href="#cb2-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb2-20"><a href="#cb2-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-21"><a href="#cb2-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-22"><a href="#cb2-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-23"><a href="#cb2-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb2-24"><a href="#cb2-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-25"><a href="#cb2-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-26"><a href="#cb2-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-27"><a href="#cb2-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-28"><a href="#cb2-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb2-29"><a href="#cb2-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-30"><a href="#cb2-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-31"><a href="#cb2-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-32"><a href="#cb2-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb2-33"><a href="#cb2-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-34"><a href="#cb2-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-35"><a href="#cb2-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-36"><a href="#cb2-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-37"><a href="#cb2-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-38"><a href="#cb2-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-39"><a href="#cb2-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-40"><a href="#cb2-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-41"><a href="#cb2-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb2-42"><a href="#cb2-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb2-43"><a href="#cb2-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-44"><a href="#cb2-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-45"><a href="#cb2-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-46"><a href="#cb2-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-47"><a href="#cb2-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-48"><a href="#cb2-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb2-49"><a href="#cb2-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>KTO config for KTO training</p>
</section>
<section id="axolotl.core.training_args.AxolotlORPOConfig" class="level3">
@@ -652,42 +654,43 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb3-10"><a href="#cb3-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb3-11"><a href="#cb3-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb3-12"><a href="#cb3-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb3-13"><a href="#cb3-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-14"><a href="#cb3-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-15"><a href="#cb3-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-16"><a href="#cb3-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb3-17"><a href="#cb3-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb3-18"><a href="#cb3-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb3-19"><a href="#cb3-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-20"><a href="#cb3-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-21"><a href="#cb3-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-22"><a href="#cb3-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb3-23"><a href="#cb3-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-24"><a href="#cb3-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-25"><a href="#cb3-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-26"><a href="#cb3-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-27"><a href="#cb3-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb3-28"><a href="#cb3-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-29"><a href="#cb3-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-30"><a href="#cb3-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-31"><a href="#cb3-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-32"><a href="#cb3-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-33"><a href="#cb3-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-34"><a href="#cb3-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-35"><a href="#cb3-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-36"><a href="#cb3-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-37"><a href="#cb3-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-38"><a href="#cb3-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-39"><a href="#cb3-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-40"><a href="#cb3-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb3-41"><a href="#cb3-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb3-42"><a href="#cb3-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-43"><a href="#cb3-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-44"><a href="#cb3-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-45"><a href="#cb3-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-46"><a href="#cb3-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-47"><a href="#cb3-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-48"><a href="#cb3-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb3-13"><a href="#cb3-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-14"><a href="#cb3-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-15"><a href="#cb3-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-16"><a href="#cb3-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-17"><a href="#cb3-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb3-18"><a href="#cb3-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb3-19"><a href="#cb3-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb3-20"><a href="#cb3-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-21"><a href="#cb3-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-22"><a href="#cb3-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-23"><a href="#cb3-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb3-24"><a href="#cb3-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-25"><a href="#cb3-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-26"><a href="#cb3-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-27"><a href="#cb3-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-28"><a href="#cb3-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb3-29"><a href="#cb3-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-30"><a href="#cb3-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-31"><a href="#cb3-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-32"><a href="#cb3-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb3-33"><a href="#cb3-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-34"><a href="#cb3-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-35"><a href="#cb3-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-36"><a href="#cb3-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-37"><a href="#cb3-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-38"><a href="#cb3-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-39"><a href="#cb3-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-40"><a href="#cb3-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-41"><a href="#cb3-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb3-42"><a href="#cb3-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb3-43"><a href="#cb3-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-44"><a href="#cb3-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-45"><a href="#cb3-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-46"><a href="#cb3-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-47"><a href="#cb3-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-48"><a href="#cb3-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb3-49"><a href="#cb3-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>ORPO config for ORPO training</p>
</section>
<section id="axolotl.core.training_args.AxolotlPRMConfig" class="level3">
@@ -704,42 +707,43 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb4-10"><a href="#cb4-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb4-11"><a href="#cb4-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb4-12"><a href="#cb4-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb4-13"><a href="#cb4-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-14"><a href="#cb4-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-15"><a href="#cb4-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-16"><a href="#cb4-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb4-17"><a href="#cb4-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb4-18"><a href="#cb4-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb4-19"><a href="#cb4-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-20"><a href="#cb4-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-21"><a href="#cb4-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-22"><a href="#cb4-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb4-23"><a href="#cb4-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-24"><a href="#cb4-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-25"><a href="#cb4-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-26"><a href="#cb4-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-27"><a href="#cb4-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb4-28"><a href="#cb4-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-29"><a href="#cb4-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-30"><a href="#cb4-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-31"><a href="#cb4-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-32"><a href="#cb4-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-33"><a href="#cb4-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-34"><a href="#cb4-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-35"><a href="#cb4-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-36"><a href="#cb4-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-37"><a href="#cb4-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-38"><a href="#cb4-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-39"><a href="#cb4-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-40"><a href="#cb4-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb4-41"><a href="#cb4-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb4-42"><a href="#cb4-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-43"><a href="#cb4-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-44"><a href="#cb4-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-45"><a href="#cb4-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-46"><a href="#cb4-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-47"><a href="#cb4-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-48"><a href="#cb4-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb4-13"><a href="#cb4-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-14"><a href="#cb4-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-15"><a href="#cb4-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-16"><a href="#cb4-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-17"><a href="#cb4-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb4-18"><a href="#cb4-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb4-19"><a href="#cb4-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb4-20"><a href="#cb4-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-21"><a href="#cb4-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-22"><a href="#cb4-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-23"><a href="#cb4-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb4-24"><a href="#cb4-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-25"><a href="#cb4-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-26"><a href="#cb4-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-27"><a href="#cb4-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-28"><a href="#cb4-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb4-29"><a href="#cb4-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-30"><a href="#cb4-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-31"><a href="#cb4-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-32"><a href="#cb4-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb4-33"><a href="#cb4-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-34"><a href="#cb4-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-35"><a href="#cb4-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-36"><a href="#cb4-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-37"><a href="#cb4-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-38"><a href="#cb4-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-39"><a href="#cb4-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-40"><a href="#cb4-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-41"><a href="#cb4-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb4-42"><a href="#cb4-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb4-43"><a href="#cb4-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-44"><a href="#cb4-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-45"><a href="#cb4-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-46"><a href="#cb4-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-47"><a href="#cb4-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-48"><a href="#cb4-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb4-49"><a href="#cb4-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>PRM config for PRM training</p>
</section>
<section id="axolotl.core.training_args.AxolotlRewardConfig" class="level3">
@@ -756,42 +760,43 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb5-10"><a href="#cb5-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb5-11"><a href="#cb5-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb5-12"><a href="#cb5-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb5-13"><a href="#cb5-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-14"><a href="#cb5-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-15"><a href="#cb5-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-16"><a href="#cb5-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb5-17"><a href="#cb5-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb5-18"><a href="#cb5-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb5-19"><a href="#cb5-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-20"><a href="#cb5-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-21"><a href="#cb5-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-22"><a href="#cb5-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb5-23"><a href="#cb5-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-24"><a href="#cb5-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-25"><a href="#cb5-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-26"><a href="#cb5-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-27"><a href="#cb5-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb5-28"><a href="#cb5-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-29"><a href="#cb5-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-30"><a href="#cb5-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-31"><a href="#cb5-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-32"><a href="#cb5-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-33"><a href="#cb5-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-34"><a href="#cb5-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-35"><a href="#cb5-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-36"><a href="#cb5-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-37"><a href="#cb5-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-38"><a href="#cb5-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-39"><a href="#cb5-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-40"><a href="#cb5-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb5-41"><a href="#cb5-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb5-42"><a href="#cb5-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-43"><a href="#cb5-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-44"><a href="#cb5-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-45"><a href="#cb5-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-46"><a href="#cb5-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-47"><a href="#cb5-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-48"><a href="#cb5-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb5-13"><a href="#cb5-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-14"><a href="#cb5-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-15"><a href="#cb5-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-16"><a href="#cb5-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-17"><a href="#cb5-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb5-18"><a href="#cb5-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb5-19"><a href="#cb5-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb5-20"><a href="#cb5-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-21"><a href="#cb5-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-22"><a href="#cb5-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-23"><a href="#cb5-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb5-24"><a href="#cb5-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-25"><a href="#cb5-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-26"><a href="#cb5-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-27"><a href="#cb5-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-28"><a href="#cb5-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb5-29"><a href="#cb5-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-30"><a href="#cb5-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-31"><a href="#cb5-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-32"><a href="#cb5-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb5-33"><a href="#cb5-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-34"><a href="#cb5-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-35"><a href="#cb5-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-36"><a href="#cb5-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-37"><a href="#cb5-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-38"><a href="#cb5-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-39"><a href="#cb5-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-40"><a href="#cb5-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-41"><a href="#cb5-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb5-42"><a href="#cb5-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb5-43"><a href="#cb5-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-44"><a href="#cb5-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-45"><a href="#cb5-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-46"><a href="#cb5-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-47"><a href="#cb5-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-48"><a href="#cb5-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb5-49"><a href="#cb5-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>Reward config for Reward training</p>
</section>
<section id="axolotl.core.training_args.AxolotlTrainingArguments" class="level3">
@@ -808,42 +813,43 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<span id="cb6-10"><a href="#cb6-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb6-11"><a href="#cb6-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb6-12"><a href="#cb6-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb6-13"><a href="#cb6-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-14"><a href="#cb6-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-15"><a href="#cb6-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-16"><a href="#cb6-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb6-17"><a href="#cb6-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb6-18"><a href="#cb6-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb6-19"><a href="#cb6-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-20"><a href="#cb6-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-21"><a href="#cb6-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-22"><a href="#cb6-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb6-23"><a href="#cb6-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-24"><a href="#cb6-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-25"><a href="#cb6-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-26"><a href="#cb6-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-27"><a href="#cb6-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb6-28"><a href="#cb6-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-29"><a href="#cb6-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-30"><a href="#cb6-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-31"><a href="#cb6-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-32"><a href="#cb6-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-33"><a href="#cb6-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-34"><a href="#cb6-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-35"><a href="#cb6-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-36"><a href="#cb6-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-37"><a href="#cb6-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-38"><a href="#cb6-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-39"><a href="#cb6-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-40"><a href="#cb6-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb6-41"><a href="#cb6-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb6-42"><a href="#cb6-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-43"><a href="#cb6-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-44"><a href="#cb6-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-45"><a href="#cb6-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-46"><a href="#cb6-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-47"><a href="#cb6-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-48"><a href="#cb6-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb6-13"><a href="#cb6-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-14"><a href="#cb6-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-15"><a href="#cb6-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-16"><a href="#cb6-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-17"><a href="#cb6-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb6-18"><a href="#cb6-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb6-19"><a href="#cb6-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb6-20"><a href="#cb6-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-21"><a href="#cb6-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-22"><a href="#cb6-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-23"><a href="#cb6-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb6-24"><a href="#cb6-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-25"><a href="#cb6-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-26"><a href="#cb6-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-27"><a href="#cb6-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-28"><a href="#cb6-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb6-29"><a href="#cb6-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-30"><a href="#cb6-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-31"><a href="#cb6-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-32"><a href="#cb6-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb6-33"><a href="#cb6-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-34"><a href="#cb6-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-35"><a href="#cb6-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-36"><a href="#cb6-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-37"><a href="#cb6-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-38"><a href="#cb6-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-39"><a href="#cb6-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-40"><a href="#cb6-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-41"><a href="#cb6-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb6-42"><a href="#cb6-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb6-43"><a href="#cb6-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-44"><a href="#cb6-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-45"><a href="#cb6-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-46"><a href="#cb6-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-47"><a href="#cb6-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-48"><a href="#cb6-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb6-49"><a href="#cb6-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>Training arguments for Causal trainer</p>
<p>This code is duplicated due to HF TrainingArguments not setting output_dir with a
default value so it can’t be used as a mixin.</p>
@@ -862,42 +868,43 @@ default value so it cant be used as a mixin.</p>
<span id="cb7-10"><a href="#cb7-10" aria-hidden="true" tabindex="-1"></a> sample_packing_bin_size<span class="op">=</span><span class="dv">200</span>,</span>
<span id="cb7-11"><a href="#cb7-11" aria-hidden="true" tabindex="-1"></a> sample_packing_group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb7-12"><a href="#cb7-12" aria-hidden="true" tabindex="-1"></a> max_seq_length<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb7-13"><a href="#cb7-13" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-14"><a href="#cb7-14" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-15"><a href="#cb7-15" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-16"><a href="#cb7-16" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb7-17"><a href="#cb7-17" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb7-18"><a href="#cb7-18" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb7-19"><a href="#cb7-19" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-20"><a href="#cb7-20" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-21"><a href="#cb7-21" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-22"><a href="#cb7-22" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb7-23"><a href="#cb7-23" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-24"><a href="#cb7-24" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-25"><a href="#cb7-25" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-26"><a href="#cb7-26" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-27"><a href="#cb7-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb7-28"><a href="#cb7-28" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-29"><a href="#cb7-29" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-30"><a href="#cb7-30" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-31"><a href="#cb7-31" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-32"><a href="#cb7-32" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-33"><a href="#cb7-33" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-34"><a href="#cb7-34" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-35"><a href="#cb7-35" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-36"><a href="#cb7-36" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-37"><a href="#cb7-37" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-38"><a href="#cb7-38" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-39"><a href="#cb7-39" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-40"><a href="#cb7-40" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb7-41"><a href="#cb7-41" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb7-42"><a href="#cb7-42" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-43"><a href="#cb7-43" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-44"><a href="#cb7-44" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-45"><a href="#cb7-45" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-46"><a href="#cb7-46" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-47"><a href="#cb7-47" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-48"><a href="#cb7-48" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<span id="cb7-13"><a href="#cb7-13" aria-hidden="true" tabindex="-1"></a> dataset_num_proc<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-14"><a href="#cb7-14" aria-hidden="true" tabindex="-1"></a> relora_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-15"><a href="#cb7-15" aria-hidden="true" tabindex="-1"></a> relora_warmup_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-16"><a href="#cb7-16" aria-hidden="true" tabindex="-1"></a> relora_anneal_steps<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-17"><a href="#cb7-17" aria-hidden="true" tabindex="-1"></a> relora_prune_ratio<span class="op">=</span><span class="fl">0.9</span>,</span>
<span id="cb7-18"><a href="#cb7-18" aria-hidden="true" tabindex="-1"></a> bench_split<span class="op">=</span><span class="st">'eval'</span>,</span>
<span id="cb7-19"><a href="#cb7-19" aria-hidden="true" tabindex="-1"></a> bench_dataset<span class="op">=</span><span class="st">'pharaouk/dharma-1/dharma_1_mini.json'</span>,</span>
<span id="cb7-20"><a href="#cb7-20" aria-hidden="true" tabindex="-1"></a> do_bench_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-21"><a href="#cb7-21" aria-hidden="true" tabindex="-1"></a> do_causal_lm_eval<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-22"><a href="#cb7-22" aria-hidden="true" tabindex="-1"></a> max_bench_samples<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-23"><a href="#cb7-23" aria-hidden="true" tabindex="-1"></a> bench_source_max_len<span class="op">=</span><span class="dv">2048</span>,</span>
<span id="cb7-24"><a href="#cb7-24" aria-hidden="true" tabindex="-1"></a> dataloader_prefetch_factor<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-25"><a href="#cb7-25" aria-hidden="true" tabindex="-1"></a> cosine_min_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-26"><a href="#cb7-26" aria-hidden="true" tabindex="-1"></a> cosine_constant_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-27"><a href="#cb7-27" aria-hidden="true" tabindex="-1"></a> loraplus_lr_ratio<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-28"><a href="#cb7-28" aria-hidden="true" tabindex="-1"></a> loraplus_lr_embedding<span class="op">=</span><span class="fl">1e-06</span>,</span>
<span id="cb7-29"><a href="#cb7-29" aria-hidden="true" tabindex="-1"></a> embedding_lr_scale<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-30"><a href="#cb7-30" aria-hidden="true" tabindex="-1"></a> lr_groups<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-31"><a href="#cb7-31" aria-hidden="true" tabindex="-1"></a> embedding_lr<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-32"><a href="#cb7-32" aria-hidden="true" tabindex="-1"></a> qlora<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb7-33"><a href="#cb7-33" aria-hidden="true" tabindex="-1"></a> orpo_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-34"><a href="#cb7-34" aria-hidden="true" tabindex="-1"></a> lisa_n_layers<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-35"><a href="#cb7-35" aria-hidden="true" tabindex="-1"></a> lisa_step_interval<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-36"><a href="#cb7-36" aria-hidden="true" tabindex="-1"></a> lisa_layers_attribute<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-37"><a href="#cb7-37" aria-hidden="true" tabindex="-1"></a> curriculum_sampling<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-38"><a href="#cb7-38" aria-hidden="true" tabindex="-1"></a> alternate_lr_scheduler_type<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-39"><a href="#cb7-39" aria-hidden="true" tabindex="-1"></a> chat_template<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-40"><a href="#cb7-40" aria-hidden="true" tabindex="-1"></a> kd_ce_alpha<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-41"><a href="#cb7-41" aria-hidden="true" tabindex="-1"></a> kd_alpha<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb7-42"><a href="#cb7-42" aria-hidden="true" tabindex="-1"></a> kd_temperature<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb7-43"><a href="#cb7-43" aria-hidden="true" tabindex="-1"></a> kd_zscore_base_temp<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-44"><a href="#cb7-44" aria-hidden="true" tabindex="-1"></a> kd_top_k_before_softmax<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-45"><a href="#cb7-45" aria-hidden="true" tabindex="-1"></a> adam_beta3<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-46"><a href="#cb7-46" aria-hidden="true" tabindex="-1"></a> adam_epsilon2<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-47"><a href="#cb7-47" aria-hidden="true" tabindex="-1"></a> image_size<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-48"><a href="#cb7-48" aria-hidden="true" tabindex="-1"></a> image_resize_algorithm<span class="op">=</span><span class="va">None</span>,</span>
<span id="cb7-49"><a href="#cb7-49" aria-hidden="true" tabindex="-1"></a>)</span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>Mixin class for the Axolotl training args.</p>

View File

@@ -520,7 +520,7 @@ into fixed-capacity batches to optimize memory usage and training throughput.</p
<span id="cb1-5"><a href="#cb1-5" aria-hidden="true" tabindex="-1"></a> lengths,</span>
<span id="cb1-6"><a href="#cb1-6" aria-hidden="true" tabindex="-1"></a> packing_efficiency_estimate<span class="op">=</span><span class="fl">1.0</span>,</span>
<span id="cb1-7"><a href="#cb1-7" aria-hidden="true" tabindex="-1"></a> drop_last<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-8"><a href="#cb1-8" aria-hidden="true" tabindex="-1"></a> num_count_samples<span class="op">=</span><span class="dv">16</span>,</span>
<span id="cb1-8"><a href="#cb1-8" aria-hidden="true" tabindex="-1"></a> num_count_samples<span class="op">=</span><span class="dv">8</span>,</span>
<span id="cb1-9"><a href="#cb1-9" aria-hidden="true" tabindex="-1"></a> sequential<span class="op">=</span><span class="va">False</span>,</span>
<span id="cb1-10"><a href="#cb1-10" aria-hidden="true" tabindex="-1"></a> group_size<span class="op">=</span><span class="dv">100000</span>,</span>
<span id="cb1-11"><a href="#cb1-11" aria-hidden="true" tabindex="-1"></a> bin_size<span class="op">=</span><span class="dv">200</span>,</span>

View File

@@ -512,7 +512,7 @@ and the QAT documentation in the <a href="https://github.com/pytorch/ao/tree/mai
<span id="cb1-3"><a href="#cb1-3" aria-hidden="true" tabindex="-1"></a><span class="at"> </span><span class="fu">weight_dtype</span><span class="kw">:</span><span class="co"> # Optional[str] = "int8". Fake quantization layout to use for weight quantization. Valid options are "int4" and "int8"</span></span>
<span id="cb1-4"><a href="#cb1-4" aria-hidden="true" tabindex="-1"></a><span class="at"> </span><span class="fu">group_size</span><span class="kw">:</span><span class="co"> # Optional[int] = 32. The number of elements in each group for per-group fake quantization</span></span>
<span id="cb1-5"><a href="#cb1-5" aria-hidden="true" tabindex="-1"></a><span class="at"> </span><span class="fu">fake_quant_after_n_steps</span><span class="kw">:</span><span class="co"> # Optional[int] = None. The number of steps to apply fake quantization after</span></span></code><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></pre></div>
<p>Once you have finished training, you must quantize your model by using the same quantization configuration which you used to train the model with. You can use the <a href="./quantize.md"><code>quantize</code> command</a> to do this.</p>
<p>Once you have finished training, you must quantize your model by using the same quantization configuration which you used to train the model with. You can use the <a href="../docs/quantize.html"><code>quantize</code></a> command to do this.</p>
</section>

View File

@@ -467,16 +467,16 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<h2 id="toc-title">On this page</h2>
<ul>
<li><a href="#quick-start" id="toc-quick-start" class="nav-link active" data-scroll-target="#quick-start">🚀 Quick Start</a>
<li><a href="#latest-updates" id="toc-latest-updates" class="nav-link active" data-scroll-target="#latest-updates">🎉 Latest Updates</a></li>
<li><a href="#overview" id="toc-overview" class="nav-link" data-scroll-target="#overview">✨ Overview</a></li>
<li><a href="#quick-start" id="toc-quick-start" class="nav-link" data-scroll-target="#quick-start">🚀 Quick Start</a>
<ul class="collapse">
<li><a href="#installation" id="toc-installation" class="nav-link" data-scroll-target="#installation">Installation</a></li>
<li><a href="#your-first-fine-tune" id="toc-your-first-fine-tune" class="nav-link" data-scroll-target="#your-first-fine-tune">Your First Fine-tune</a></li>
</ul></li>
<li><a href="#key-features" id="toc-key-features" class="nav-link" data-scroll-target="#key-features">✨ Key Features</a></li>
<li><a href="#documentation" id="toc-documentation" class="nav-link" data-scroll-target="#documentation">📚 Documentation</a></li>
<li><a href="#getting-help" id="toc-getting-help" class="nav-link" data-scroll-target="#getting-help">🤝 Getting Help</a></li>
<li><a href="#contributing" id="toc-contributing" class="nav-link" data-scroll-target="#contributing">🌟 Contributing</a></li>
<li><a href="#supported-models" id="toc-supported-models" class="nav-link" data-scroll-target="#supported-models">Supported Models</a></li>
<li><a href="#sponsors" id="toc-sponsors" class="nav-link" data-scroll-target="#sponsors">❤️ Sponsors</a></li>
<li><a href="#license" id="toc-license" class="nav-link" data-scroll-target="#license">📜 License</a></li>
</ul>
@@ -510,27 +510,31 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
<img src="https://github.com/axolotl-ai-cloud/axolotl/actions/workflows/tests-nightly.yml/badge.svg" alt="tests-nightly">
<img src="https://github.com/axolotl-ai-cloud/axolotl/actions/workflows/multi-gpu-e2e.yml/badge.svg" alt="multigpu-semi-weekly tests">
</p>
<p>Axolotl is a tool designed to streamline post-training for various AI models.
Post-training refers to any modifications or additional training performed on
pre-trained models - including full model fine-tuning, parameter-efficient tuning (like
LoRA and QLoRA), supervised fine-tuning (SFT), instruction tuning, and alignment
techniques. With support for multiple model architectures and training configurations,
Axolotl makes it easy to get started with these techniques.</p>
<p>Axolotl is designed to work with YAML config files that contain everything you need to
preprocess a dataset, train or fine-tune a model, run model inference or evaluation,
and much more.</p>
<section id="latest-updates" class="level2">
<h2 class="anchored" data-anchor-id="latest-updates">🎉 Latest Updates</h2>
<ul>
<li>2025/05: Quantization Aware Training (QAT) support has been added to Axolotl. Explore the <a href="https://docs.axolotl.ai/docs/qat.html">docs</a> to learn more!</li>
<li>2025/04: Llama 4 support has been added in Axolotl. See <a href="https://github.com/axolotl-ai-cloud/axolotl/tree/main/examples/llama-4">examples</a> to start training your own Llama 4 models with Axolotls linearized version!</li>
<li>2025/03: Axolotl has implemented Sequence Parallelism (SP) support. Read the <a href="https://huggingface.co/blog/axolotl-ai-co/long-context-with-sequence-parallelism-in-axolotl">blog</a> and <a href="https://docs.axolotl.ai/docs/sequence_parallelism.html">docs</a> to learn how to scale your context length when fine-tuning.</li>
<li>2025/03: (Beta) Fine-tuning Multimodal models is now supported in Axolotl. Check out the <a href="https://docs.axolotl.ai/docs/multimodal.html">docs</a> to fine-tune your own!</li>
<li>2025/02: Axolotl has added LoRA optimizations to reduce memory usage and improve training speed for LoRA and QLoRA in single GPU and multi-GPU training (DDP and DeepSpeed). Jump into the <a href="https://docs.axolotl.ai/docs/lora_optims.html">docs</a> to give it a try.</li>
<li>2025/02: Axolotl has added GRPO support. Dive into our <a href="https://huggingface.co/blog/axolotl-ai-co/training-llms-w-interpreter-feedback-wasm">blog</a> and <a href="https://github.com/axolotl-ai-cloud/grpo_code">GRPO example</a> and have some fun!</li>
<li>2025/01: Axolotl has added Reward Modelling / Process Reward Modelling fine-tuning support. See <a href="https://docs.axolotl.ai/docs/reward_modelling.html">docs</a>.</li>
</ul>
</section>
<section id="overview" class="level2">
<h2 class="anchored" data-anchor-id="overview">✨ Overview</h2>
<p>Axolotl is a tool designed to streamline post-training for various AI models.</p>
<p>Features:</p>
<ul>
<li>Train various Huggingface models such as llama, pythia, falcon, mpt</li>
<li>Supports fullfinetune, lora, qlora, relora, and gptq</li>
<li>Customize configurations using a simple yaml file or CLI overwrite</li>
<li>Load different dataset formats, use custom formats, or bring your own tokenized datasets</li>
<li>Integrated with <a href="https://github.com/facebookresearch/xformers">xformers</a>, flash attention, <a href="https://github.com/linkedin/Liger-Kernel">liger kernel</a>, rope scaling, and multipacking</li>
<li>Works with single GPU or multiple GPUs via FSDP or Deepspeed</li>
<li>Easily run with Docker locally or on the cloud</li>
<li>Log results and optionally checkpoints to wandb, mlflow or Comet</li>
<li>And more!</li>
<li><strong>Multiple Model Support</strong>: Train various models like LLaMA, Mistral, Mixtral, Pythia, and more. We are compatible with HuggingFace transformers causal language models.</li>
<li><strong>Training Methods</strong>: Full fine-tuning, LoRA, QLoRA, GPTQ, QAT, Preference Tuning (DPO, IPO, KTO, ORPO), RL (GRPO), Multimodal, and Reward Modelling (RM) / Process Reward Modelling (PRM).</li>
<li><strong>Easy Configuration</strong>: Re-use a single YAML file between dataset preprocess, training, evaluation, quantization, and inference.</li>
<li><strong>Performance Optimizations</strong>: <a href="https://docs.axolotl.ai/docs/multipack.html">Multipacking</a>, <a href="https://github.com/Dao-AILab/flash-attention">Flash Attention</a>, <a href="https://github.com/facebookresearch/xformers">Xformers</a>, <a href="https://pytorch.org/blog/flexattention/">Flex Attention</a>, <a href="https://github.com/linkedin/Liger-Kernel">Liger Kernel</a>, <a href="https://github.com/apple/ml-cross-entropy/tree/main">Cut Cross Entropy</a>, Sequence Parallelism (SP), LoRA optimizations, Multi-GPU training (FSDP1, FSDP2, DeepSpeed), Multi-node training (Torchrun, Ray), and many more!</li>
<li><strong>Flexible Dataset Handling</strong>: Load from local, HuggingFace, and cloud (S3, Azure, GCP, OCI) datasets.</li>
<li><strong>Cloud Ready</strong>: We ship <a href="https://hub.docker.com/u/axolotlai">Docker images</a> and also <a href="https://pypi.org/project/axolotl/">PyPI packages</a> for use on cloud platforms and local hardware.</li>
</ul>
</section>
<section id="quick-start" class="level2">
<h2 class="anchored" data-anchor-id="quick-start">🚀 Quick Start</h2>
<p><strong>Requirements</strong>:</p>
@@ -562,22 +566,12 @@ and much more.</p>
<p>Thats it! Check out our <a href="https://docs.axolotl.ai/docs/getting-started.html">Getting Started Guide</a> for a more detailed walkthrough.</p>
</section>
</section>
<section id="key-features" class="level2">
<h2 class="anchored" data-anchor-id="key-features">✨ Key Features</h2>
<ul>
<li><strong>Multiple Model Support</strong>: Train various models like LLaMA, Mistral, Mixtral, Pythia, and more</li>
<li><strong>Training Methods</strong>: Full fine-tuning, LoRA, QLoRA, and more</li>
<li><strong>Easy Configuration</strong>: Simple YAML files to control your training setup</li>
<li><strong>Performance Optimizations</strong>: Flash Attention, xformers, multi-GPU training</li>
<li><strong>Flexible Dataset Handling</strong>: Use various formats and custom datasets</li>
<li><strong>Cloud Ready</strong>: Run on cloud platforms or local hardware</li>
</ul>
</section>
<section id="documentation" class="level2">
<h2 class="anchored" data-anchor-id="documentation">📚 Documentation</h2>
<ul>
<li><a href="https://docs.axolotl.ai/docs/installation.html">Installation Options</a> - Detailed setup instructions for different environments</li>
<li><a href="https://docs.axolotl.ai/docs/config.html">Configuration Guide</a> - Full configuration options and examples</li>
<li><a href="https://docs.axolotl.ai/docs/dataset_loading.html">Dataset Loading</a> - Loading datasets from various sources</li>
<li><a href="https://docs.axolotl.ai/docs/dataset-formats/">Dataset Guide</a> - Supported formats and how to use them</li>
<li><a href="https://docs.axolotl.ai/docs/multi-gpu.html">Multi-GPU Training</a></li>
<li><a href="https://docs.axolotl.ai/docs/multi-node.html">Multi-Node Training</a></li>
@@ -599,198 +593,6 @@ and much more.</p>
<h2 class="anchored" data-anchor-id="contributing">🌟 Contributing</h2>
<p>Contributions are welcome! Please see our <a href="https://github.com/axolotl-ai-cloud/axolotl/blob/main/.github/CONTRIBUTING.md">Contributing Guide</a> for details.</p>
</section>
<section id="supported-models" class="level2">
<h2 class="anchored" data-anchor-id="supported-models">Supported Models</h2>
<table class="caption-top table">
<colgroup>
<col style="width: 14%">
<col style="width: 12%">
<col style="width: 6%">
<col style="width: 7%">
<col style="width: 6%">
<col style="width: 21%">
<col style="width: 13%">
<col style="width: 15%">
</colgroup>
<thead>
<tr class="header">
<th></th>
<th style="text-align: left;">fp16/fp32</th>
<th style="text-align: left;">lora</th>
<th>qlora</th>
<th>gptq</th>
<th>gptq w/flash attn</th>
<th>flash attn</th>
<th>xformers attn</th>
</tr>
</thead>
<tbody>
<tr class="odd">
<td>llama</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>Mistral</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>Mixtral-MoE</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>Mixtral8X22</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>Pythia</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>cerebras</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>btlm</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>mpt</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>falcon</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>gpt-j</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>XGen</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>phi</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>RWKV</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>Qwen</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="odd">
<td>Gemma</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr class="even">
<td>Jamba</td>
<td style="text-align: left;"></td>
<td style="text-align: left;"></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
<p>✅: supported
❌: not supported
❓: untested</p>
</section>
<section id="sponsors" class="level2">
<h2 class="anchored" data-anchor-id="sponsors">❤️ Sponsors</h2>
<p>Thank you to our sponsors who help make Axolotl possible:</p>

File diff suppressed because one or more lines are too long

View File

@@ -2,758 +2,758 @@
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url>
<loc>https://docs.axolotl.ai/docs/unsloth.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/conversation.html</loc>
<lastmod>2025-06-11T21:11:16.394Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/stepwise_supervised.html</loc>
<lastmod>2025-06-11T21:11:16.394Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/tokenized.html</loc>
<lastmod>2025-06-11T21:11:16.394Z</lastmod>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/mac.html</loc>
<lastmod>2025-06-11T21:11:16.398Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/nccl.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/multi-node.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/docker.html</loc>
<lastmod>2025-06-11T21:11:16.394Z</lastmod>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/lr_groups.html</loc>
<lastmod>2025-06-11T21:11:16.398Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/inference.html</loc>
<lastmod>2025-06-11T21:11:16.398Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/cli.html</loc>
<lastmod>2025-06-11T21:11:16.393Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/faq.html</loc>
<lastmod>2025-06-11T21:11:16.395Z</lastmod>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/getting-started.html</loc>
<lastmod>2025-06-11T21:11:16.395Z</lastmod>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/custom_integrations.html</loc>
<lastmod>2025-06-11T21:11:16.393Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/fsdp_qlora.html</loc>
<lastmod>2025-06-11T21:11:16.395Z</lastmod>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/common.const.html</loc>
<lastmod>2025-06-11T21:11:47.171Z</lastmod>
<lastmod>2025-06-12T17:24:09.577Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_tokenizers.html</loc>
<lastmod>2025-06-11T21:11:45.864Z</lastmod>
<lastmod>2025-06-12T17:24:08.286Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.user_defined.html</loc>
<lastmod>2025-06-11T21:11:46.505Z</lastmod>
<lastmod>2025-06-12T17:24:08.913Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.training_args.html</loc>
<lastmod>2025-06-11T21:11:45.994Z</lastmod>
<lastmod>2025-06-12T17:24:08.405Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.user_defined.html</loc>
<lastmod>2025-06-11T21:11:46.429Z</lastmod>
<lastmod>2025-06-12T17:24:08.838Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.dict.html</loc>
<lastmod>2025-06-11T21:11:46.902Z</lastmod>
<lastmod>2025-06-12T17:24:09.309Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.unsloth_.html</loc>
<lastmod>2025-06-11T21:11:46.766Z</lastmod>
<lastmod>2025-06-12T17:24:09.174Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.collators.mamba.html</loc>
<lastmod>2025-06-11T21:11:47.211Z</lastmod>
<lastmod>2025-06-12T17:24:09.616Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.mixins.optimizer.html</loc>
<lastmod>2025-06-11T21:11:46.335Z</lastmod>
<lastmod>2025-06-12T17:24:08.745Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.train.html</loc>
<lastmod>2025-06-11T21:11:46.074Z</lastmod>
<lastmod>2025-06-12T17:24:08.485Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.messages.chat.html</loc>
<lastmod>2025-06-11T21:11:46.479Z</lastmod>
<lastmod>2025-06-12T17:24:08.888Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.chat.format.llama3x.html</loc>
<lastmod>2025-06-11T21:11:46.020Z</lastmod>
<lastmod>2025-06-12T17:24:08.431Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.processor.html</loc>
<lastmod>2025-06-11T21:11:46.315Z</lastmod>
<lastmod>2025-06-12T17:24:08.725Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.datasets.transforms.chat_builder.html</loc>
<lastmod>2025-06-11T21:11:46.034Z</lastmod>
<lastmod>2025-06-12T17:24:08.445Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.mamba.html</loc>
<lastmod>2025-06-11T21:11:46.260Z</lastmod>
<lastmod>2025-06-12T17:24:08.670Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.gradient_checkpointing.offload_cpu.html</loc>
<lastmod>2025-06-11T21:11:46.773Z</lastmod>
<lastmod>2025-06-12T17:24:09.180Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/models.mamba.modeling_mamba.html</loc>
<lastmod>2025-06-11T21:11:47.187Z</lastmod>
<lastmod>2025-06-12T17:24:09.592Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.relora.html</loc>
<lastmod>2025-06-11T21:11:46.264Z</lastmod>
<lastmod>2025-06-12T17:24:08.674Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.builders.causal.html</loc>
<lastmod>2025-06-11T21:11:45.884Z</lastmod>
<lastmod>2025-06-12T17:24:08.307Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.chat.messages.html</loc>
<lastmod>2025-06-11T21:11:46.017Z</lastmod>
<lastmod>2025-06-12T17:24:08.428Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.lm_eval.args.html</loc>
<lastmod>2025-06-11T21:11:47.165Z</lastmod>
<lastmod>2025-06-12T17:24:09.570Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.quantize.html</loc>
<lastmod>2025-06-11T21:11:46.228Z</lastmod>
<lastmod>2025-06-12T17:24:08.638Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.checks.html</loc>
<lastmod>2025-06-11T21:11:46.108Z</lastmod>
<lastmod>2025-06-12T17:24:08.519Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.kto.llama3.html</loc>
<lastmod>2025-06-11T21:11:46.514Z</lastmod>
<lastmod>2025-06-12T17:24:08.923Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/kernels.lora.html</loc>
<lastmod>2025-06-11T21:11:46.632Z</lastmod>
<lastmod>2025-06-12T17:24:09.040Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.multimodal.html</loc>
<lastmod>2025-06-11T21:11:46.997Z</lastmod>
<lastmod>2025-06-12T17:24:09.404Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.adapter.html</loc>
<lastmod>2025-06-11T21:11:46.320Z</lastmod>
<lastmod>2025-06-12T17:24:08.730Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/index.html</loc>
<lastmod>2025-06-11T21:11:45.724Z</lastmod>
<lastmod>2025-06-12T17:24:08.148Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.llama_patch_multipack.html</loc>
<lastmod>2025-06-11T21:11:46.749Z</lastmod>
<lastmod>2025-06-12T17:24:09.157Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/train.html</loc>
<lastmod>2025-06-11T21:11:45.786Z</lastmod>
<lastmod>2025-06-12T17:24:08.210Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.mixtral.html</loc>
<lastmod>2025-06-11T21:11:46.769Z</lastmod>
<lastmod>2025-06-12T17:24:09.177Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.chatml.html</loc>
<lastmod>2025-06-11T21:11:46.502Z</lastmod>
<lastmod>2025-06-12T17:24:08.910Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.grokfast.optimizer.html</loc>
<lastmod>2025-06-11T21:11:47.150Z</lastmod>
<lastmod>2025-06-12T17:24:09.556Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.samplers.multipack.html</loc>
<lastmod>2025-06-11T21:11:47.256Z</lastmod>
<lastmod>2025-06-12T17:24:09.661Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.alpaca_chat.html</loc>
<lastmod>2025-06-11T21:11:46.408Z</lastmod>
<lastmod>2025-06-12T17:24:08.817Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.llama_expand_mask.html</loc>
<lastmod>2025-06-11T21:11:46.713Z</lastmod>
<lastmod>2025-06-12T17:24:09.121Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/common.architectures.html</loc>
<lastmod>2025-06-11T21:11:47.169Z</lastmod>
<lastmod>2025-06-12T17:24:09.575Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.utils.html</loc>
<lastmod>2025-06-11T21:11:47.025Z</lastmod>
<lastmod>2025-06-12T17:24:09.432Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.chat_templates.html</loc>
<lastmod>2025-06-11T21:11:46.816Z</lastmod>
<lastmod>2025-06-12T17:24:09.223Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.comet_.html</loc>
<lastmod>2025-06-11T21:11:47.274Z</lastmod>
<lastmod>2025-06-12T17:24:09.680Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.main.html</loc>
<lastmod>2025-06-11T21:11:46.066Z</lastmod>
<lastmod>2025-06-12T17:24:08.476Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.grpo.trainer.html</loc>
<lastmod>2025-06-11T21:11:46.282Z</lastmod>
<lastmod>2025-06-12T17:24:08.692Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.mlflow_.html</loc>
<lastmod>2025-06-11T21:11:47.271Z</lastmod>
<lastmod>2025-06-12T17:24:09.676Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.model.html</loc>
<lastmod>2025-06-11T21:11:46.305Z</lastmod>
<lastmod>2025-06-12T17:24:08.715Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.tokenization.html</loc>
<lastmod>2025-06-11T21:11:46.806Z</lastmod>
<lastmod>2025-06-12T17:24:09.213Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.kto.chatml.html</loc>
<lastmod>2025-06-11T21:11:46.522Z</lastmod>
<lastmod>2025-06-12T17:24:08.931Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.model_shard_quant.html</loc>
<lastmod>2025-06-11T21:11:46.826Z</lastmod>
<lastmod>2025-06-12T17:24:09.234Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.mixins.scheduler.html</loc>
<lastmod>2025-06-11T21:11:46.346Z</lastmod>
<lastmod>2025-06-12T17:24:08.755Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.chat.format.chatml.html</loc>
<lastmod>2025-06-11T21:11:46.019Z</lastmod>
<lastmod>2025-06-12T17:24:08.429Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.passthrough.html</loc>
<lastmod>2025-06-11T21:11:46.506Z</lastmod>
<lastmod>2025-06-12T17:24:08.915Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.orpo.chat_template.html</loc>
<lastmod>2025-06-11T21:11:46.544Z</lastmod>
<lastmod>2025-06-12T17:24:08.953Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.multipack.html</loc>
<lastmod>2025-06-11T21:11:46.704Z</lastmod>
<lastmod>2025-06-12T17:24:09.113Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.base.html</loc>
<lastmod>2025-06-11T21:11:46.370Z</lastmod>
<lastmod>2025-06-12T17:24:08.780Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.grpo.sampler.html</loc>
<lastmod>2025-06-11T21:11:46.294Z</lastmod>
<lastmod>2025-06-12T17:24:08.704Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.collators.batching.html</loc>
<lastmod>2025-06-11T21:11:47.207Z</lastmod>
<lastmod>2025-06-12T17:24:09.613Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.lora_kernels.html</loc>
<lastmod>2025-06-11T21:11:46.738Z</lastmod>
<lastmod>2025-06-12T17:24:09.146Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.kd.trainer.html</loc>
<lastmod>2025-06-11T21:11:47.158Z</lastmod>
<lastmod>2025-06-12T17:24:09.564Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.enums.html</loc>
<lastmod>2025-06-11T21:11:47.020Z</lastmod>
<lastmod>2025-06-12T17:24:09.426Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/datasets.html</loc>
<lastmod>2025-06-11T21:11:45.808Z</lastmod>
<lastmod>2025-06-12T17:24:08.232Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.zephyr.html</loc>
<lastmod>2025-06-11T21:11:46.503Z</lastmod>
<lastmod>2025-06-12T17:24:08.912Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.data.batch_dataset_fetcher.html</loc>
<lastmod>2025-06-11T21:11:46.768Z</lastmod>
<lastmod>2025-06-12T17:24:09.176Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.model.html</loc>
<lastmod>2025-06-11T21:11:46.956Z</lastmod>
<lastmod>2025-06-12T17:24:09.364Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.cut_cross_entropy.args.html</loc>
<lastmod>2025-06-11T21:11:47.149Z</lastmod>
<lastmod>2025-06-12T17:24:09.555Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.trainer.html</loc>
<lastmod>2025-06-11T21:11:46.854Z</lastmod>
<lastmod>2025-06-12T17:24:09.262Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.lisa.html</loc>
<lastmod>2025-06-11T21:11:47.267Z</lastmod>
<lastmod>2025-06-12T17:24:09.673Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.data.pretraining.html</loc>
<lastmod>2025-06-11T21:11:46.911Z</lastmod>
<lastmod>2025-06-12T17:24:09.318Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.profiler.html</loc>
<lastmod>2025-06-11T21:11:47.266Z</lastmod>
<lastmod>2025-06-12T17:24:09.671Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.metharme.html</loc>
<lastmod>2025-06-11T21:11:46.465Z</lastmod>
<lastmod>2025-06-12T17:24:08.874Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.collators.core.html</loc>
<lastmod>2025-06-11T21:11:47.189Z</lastmod>
<lastmod>2025-06-12T17:24:09.594Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.stablelm_attn_hijack_flash.html</loc>
<lastmod>2025-06-11T21:11:46.755Z</lastmod>
<lastmod>2025-06-12T17:24:09.163Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.alpaca_w_system.html</loc>
<lastmod>2025-06-11T21:11:46.421Z</lastmod>
<lastmod>2025-06-12T17:24:08.830Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.lora.html</loc>
<lastmod>2025-06-11T21:11:46.820Z</lastmod>
<lastmod>2025-06-12T17:24:09.228Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/qat.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/quantize.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/ray-integration.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/sequence_parallelism.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/reward_modelling.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/index.html</loc>
<lastmod>2025-06-11T21:11:16.413Z</lastmod>
<lastmod>2025-06-12T17:23:39.014Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/src/axolotl/integrations/LICENSE.html</loc>
<lastmod>2025-06-11T21:11:16.416Z</lastmod>
<lastmod>2025-06-12T17:23:39.018Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/FAQS.html</loc>
<lastmod>2025-06-11T21:11:16.390Z</lastmod>
<lastmod>2025-06-12T17:23:38.995Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/src/axolotl/integrations/cut_cross_entropy/ACKNOWLEDGEMENTS.html</loc>
<lastmod>2025-06-11T21:11:16.417Z</lastmod>
<lastmod>2025-06-12T17:23:39.018Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/TODO.html</loc>
<lastmod>2025-06-11T21:11:16.391Z</lastmod>
<lastmod>2025-06-12T17:23:38.996Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/examples/colab-notebooks/colab-axolotl-example.html</loc>
<lastmod>2025-06-11T21:11:16.400Z</lastmod>
<lastmod>2025-06-12T17:23:39.002Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/torchao.html</loc>
<lastmod>2025-06-11T21:11:16.399Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/config.html</loc>
<lastmod>2025-06-11T21:11:16.393Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/input_output.html</loc>
<lastmod>2025-06-11T21:11:16.398Z</lastmod>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/batch_vs_grad.html</loc>
<lastmod>2025-06-11T21:11:16.393Z</lastmod>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.quantization.html</loc>
<lastmod>2025-06-11T21:11:46.938Z</lastmod>
<lastmod>2025-06-12T17:24:09.346Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.bench.html</loc>
<lastmod>2025-06-11T21:11:46.830Z</lastmod>
<lastmod>2025-06-12T17:24:09.237Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.tokenizer.html</loc>
<lastmod>2025-06-11T21:11:46.313Z</lastmod>
<lastmod>2025-06-12T17:24:08.723Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.freeze.html</loc>
<lastmod>2025-06-11T21:11:46.837Z</lastmod>
<lastmod>2025-06-12T17:24:09.245Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.orcamini.html</loc>
<lastmod>2025-06-11T21:11:46.469Z</lastmod>
<lastmod>2025-06-12T17:24:08.878Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.training.html</loc>
<lastmod>2025-06-11T21:11:46.961Z</lastmod>
<lastmod>2025-06-12T17:24:09.369Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.spectrum.args.html</loc>
<lastmod>2025-06-11T21:11:47.168Z</lastmod>
<lastmod>2025-06-12T17:24:09.574Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.ctx_managers.sequence_parallel.html</loc>
<lastmod>2025-06-11T21:11:46.369Z</lastmod>
<lastmod>2025-06-12T17:24:08.778Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.inference.html</loc>
<lastmod>2025-06-11T21:11:46.140Z</lastmod>
<lastmod>2025-06-12T17:24:08.551Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/logging_config.html</loc>
<lastmod>2025-06-11T21:11:45.873Z</lastmod>
<lastmod>2025-06-12T17:24:08.296Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.constants.html</loc>
<lastmod>2025-06-11T21:11:46.330Z</lastmod>
<lastmod>2025-06-12T17:24:08.740Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.chat_template.html</loc>
<lastmod>2025-06-11T21:11:46.481Z</lastmod>
<lastmod>2025-06-12T17:24:08.890Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.args.html</loc>
<lastmod>2025-06-11T21:11:46.102Z</lastmod>
<lastmod>2025-06-12T17:24:08.512Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.trl.html</loc>
<lastmod>2025-06-11T21:11:46.991Z</lastmod>
<lastmod>2025-06-12T17:24:09.399Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.pygmalion.html</loc>
<lastmod>2025-06-11T21:11:46.475Z</lastmod>
<lastmod>2025-06-12T17:24:08.884Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/convert.html</loc>
<lastmod>2025-06-11T21:11:45.821Z</lastmod>
<lastmod>2025-06-12T17:24:08.245Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.base.html</loc>
<lastmod>2025-06-11T21:11:46.238Z</lastmod>
<lastmod>2025-06-12T17:24:08.649Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.preprocess.html</loc>
<lastmod>2025-06-11T21:11:46.169Z</lastmod>
<lastmod>2025-06-12T17:24:08.579Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.config.html</loc>
<lastmod>2025-06-11T21:11:46.126Z</lastmod>
<lastmod>2025-06-12T17:24:08.537Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.relora.html</loc>
<lastmod>2025-06-11T21:11:46.711Z</lastmod>
<lastmod>2025-06-12T17:24:09.119Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.chat.format.shared.html</loc>
<lastmod>2025-06-11T21:11:46.022Z</lastmod>
<lastmod>2025-06-12T17:24:08.432Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.dpo.trainer.html</loc>
<lastmod>2025-06-11T21:11:46.271Z</lastmod>
<lastmod>2025-06-12T17:24:08.681Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.qat.html</loc>
<lastmod>2025-06-11T21:11:47.281Z</lastmod>
<lastmod>2025-06-12T17:24:09.686Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.optimizers.adopt.html</loc>
<lastmod>2025-06-11T21:11:46.909Z</lastmod>
<lastmod>2025-06-12T17:24:09.317Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.evaluate.html</loc>
<lastmod>2025-06-11T21:11:46.082Z</lastmod>
<lastmod>2025-06-12T17:24:08.493Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.trl.html</loc>
<lastmod>2025-06-11T21:11:46.255Z</lastmod>
<lastmod>2025-06-12T17:24:08.665Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.builders.base.html</loc>
<lastmod>2025-06-11T21:11:45.880Z</lastmod>
<lastmod>2025-06-12T17:24:08.302Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.trainer_fsdp_optim.html</loc>
<lastmod>2025-06-11T21:11:46.758Z</lastmod>
<lastmod>2025-06-12T17:24:09.166Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.gradient_checkpointing.offload_disk.html</loc>
<lastmod>2025-06-11T21:11:46.799Z</lastmod>
<lastmod>2025-06-12T17:24:09.206Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.distributed.html</loc>
<lastmod>2025-06-11T21:11:46.898Z</lastmod>
<lastmod>2025-06-12T17:24:09.306Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.input_output.html</loc>
<lastmod>2025-06-11T21:11:46.454Z</lastmod>
<lastmod>2025-06-12T17:24:08.862Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.config.html</loc>
<lastmod>2025-06-11T21:11:46.949Z</lastmod>
<lastmod>2025-06-12T17:24:09.357Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.utils.html</loc>
<lastmod>2025-06-11T21:11:46.207Z</lastmod>
<lastmod>2025-06-12T17:24:08.617Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.callbacks.perplexity.html</loc>
<lastmod>2025-06-11T21:11:47.262Z</lastmod>
<lastmod>2025-06-12T17:24:09.667Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.integrations.html</loc>
<lastmod>2025-06-11T21:11:47.009Z</lastmod>
<lastmod>2025-06-12T17:24:09.416Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/loaders.patch_manager.html</loc>
<lastmod>2025-06-11T21:11:46.328Z</lastmod>
<lastmod>2025-06-12T17:24:08.738Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.utils.html</loc>
<lastmod>2025-06-11T21:11:46.746Z</lastmod>
<lastmod>2025-06-12T17:24:09.154Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.vllm_serve.html</loc>
<lastmod>2025-06-11T21:11:46.214Z</lastmod>
<lastmod>2025-06-12T17:24:08.624Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.mistral_attn_hijack_flash.html</loc>
<lastmod>2025-06-11T21:11:46.703Z</lastmod>
<lastmod>2025-06-12T17:24:09.111Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.liger.args.html</loc>
<lastmod>2025-06-11T21:11:47.161Z</lastmod>
<lastmod>2025-06-12T17:24:09.567Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.alpaca_instruct.html</loc>
<lastmod>2025-06-11T21:11:46.410Z</lastmod>
<lastmod>2025-06-12T17:24:08.818Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.data.sft.html</loc>
<lastmod>2025-06-11T21:11:46.918Z</lastmod>
<lastmod>2025-06-12T17:24:09.325Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.collators.mm_chat.html</loc>
<lastmod>2025-06-11T21:11:47.216Z</lastmod>
<lastmod>2025-06-12T17:24:09.621Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.llama2_chat.html</loc>
<lastmod>2025-06-11T21:11:46.442Z</lastmod>
<lastmod>2025-06-12T17:24:08.850Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.transformers_fa_utils.html</loc>
<lastmod>2025-06-11T21:11:46.765Z</lastmod>
<lastmod>2025-06-12T17:24:09.173Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.merge_sharded_fsdp_weights.html</loc>
<lastmod>2025-06-11T21:11:46.161Z</lastmod>
<lastmod>2025-06-12T17:24:08.571Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.merge_lora.html</loc>
<lastmod>2025-06-11T21:11:46.148Z</lastmod>
<lastmod>2025-06-12T17:24:08.559Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/integrations.base.html</loc>
<lastmod>2025-06-11T21:11:47.146Z</lastmod>
<lastmod>2025-06-12T17:24:09.551Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.mixins.rng_state_loader.html</loc>
<lastmod>2025-06-11T21:11:46.339Z</lastmod>
<lastmod>2025-06-12T17:24:08.749Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.llama_attn_hijack_flash.html</loc>
<lastmod>2025-06-11T21:11:46.687Z</lastmod>
<lastmod>2025-06-12T17:24:09.095Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/kernels.quantize.html</loc>
<lastmod>2025-06-11T21:11:46.660Z</lastmod>
<lastmod>2025-06-12T17:24:09.068Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/evaluate.html</loc>
<lastmod>2025-06-11T21:11:45.797Z</lastmod>
<lastmod>2025-06-12T17:24:08.221Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.builders.rl.html</loc>
<lastmod>2025-06-12T17:24:08.315Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.datasets.html</loc>
<lastmod>2025-06-12T17:24:09.387Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/common.datasets.html</loc>
<lastmod>2025-06-12T17:24:09.591Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/kernels.utils.html</loc>
<lastmod>2025-06-12T17:24:09.069Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.completion.html</loc>
<lastmod>2025-06-12T17:24:08.856Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.bradley_terry.llama3.html</loc>
<lastmod>2025-06-12T17:24:08.957Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.stepwise_supervised.html</loc>
<lastmod>2025-06-12T17:24:08.867Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/kernels.swiglu.html</loc>
<lastmod>2025-06-12T17:24:09.060Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.cloud.base.html</loc>
<lastmod>2025-06-12T17:24:08.627Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.kto.user_defined.html</loc>
<lastmod>2025-06-12T17:24:08.933Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.chat_template.html</loc>
<lastmod>2025-06-12T17:24:08.803Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.btlm_attn_hijack_flash.html</loc>
<lastmod>2025-06-12T17:24:09.156Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schemas.peft.html</loc>
<lastmod>2025-06-12T17:24:09.395Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.datasets.chat.html</loc>
<lastmod>2025-06-12T17:24:08.437Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/core.trainers.utils.html</loc>
<lastmod>2025-06-12T17:24:08.705Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/kernels.geglu.html</loc>
<lastmod>2025-06-12T17:24:09.050Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.cloud.modal_.html</loc>
<lastmod>2025-06-12T17:24:08.634Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/monkeypatch.llama_attn_hijack_xformers.html</loc>
<lastmod>2025-06-12T17:24:09.097Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/utils.schedulers.html</loc>
<lastmod>2025-06-12T17:24:09.286Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/prompt_strategies.dpo.llama3.html</loc>
<lastmod>2025-06-12T17:24:08.900Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/api/cli.sweeps.html</loc>
<lastmod>2025-06-12T17:24:08.585Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/multimodal.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/debugging.html</loc>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/multi-gpu.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/lora_optims.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/rlhf.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/amd_hpc.html</loc>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/installation.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/multipack.html</loc>
<lastmod>2025-06-12T17:23:39.001Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset_preprocessing.html</loc>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset_loading.html</loc>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/inst_tune.html</loc>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/template_free.html</loc>
<lastmod>2025-06-12T17:23:38.998Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/index.html</loc>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
<url>
<loc>https://docs.axolotl.ai/docs/dataset-formats/pretraining.html</loc>
<lastmod>2025-06-12T17:23:38.997Z</lastmod>
</url>
</urlset>