diff --git a/.nojekyll b/.nojekyll
index 5361f5b47..d30a5b91b 100644
--- a/.nojekyll
+++ b/.nojekyll
@@ -1 +1 @@
-11c88725
\ No newline at end of file
+1cf0992e
\ No newline at end of file
diff --git a/FAQS.html b/FAQS.html
index 85ac56908..455a991fb 100644
--- a/FAQS.html
+++ b/FAQS.html
@@ -661,12 +661,6 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
FSDP + QLoRA
  • Install Axolotl following the installation guide.

    Here is an example of how to install with pip:

    # Ensure you have a compatible version of PyTorch installed
    -pip3 install packaging setuptools wheel ninja
    -pip3 install --no-build-isolation 'axolotl[flash-attn]>=0.12.0'
    +uv pip install --no-build-isolation 'axolotl[flash-attn]>=0.12.0'
  • Run one of the finetuning examples below.

    LFM2

    # FFT SFT (1x48GB @ 25GiB)
    @@ -837,7 +830,7 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});
     
    # LoRA SFT (1x48GB @ 2.7GiB)
     axolotl train examples/LiquidAI/lfm2-vl-lora.yaml
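
    The VRAM figures quoted above (e.g. "1x48GB @ 2.7GiB") appear to be peak
    usage on a single 48 GB GPU. If you want to confirm them on your own
    hardware, one option (assuming an NVIDIA GPU with the standard driver
    tools installed) is to poll nvidia-smi from a second terminal while the
    run is active:

    # Report GPU memory in use every 5 seconds
    watch -n 5 nvidia-smi --query-gpu=memory.used --format=csv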

    LFM2-MoE

    -pip install git+https://github.com/huggingface/transformers.git@0c9a72e4576fe4c84077f066e585129c97bfd4e6
    +uv pip install git+https://github.com/huggingface/transformers.git@0c9a72e4576fe4c84077f066e585129c97bfd4e6
     
     # LoRA SFT (1x48GB @ 16.2GiB)
     axolotl train examples/LiquidAI/lfm2-8b-a1b-lora.yaml
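
    Since the LFM2-MoE example pins transformers to a specific commit, it can
    be worth checking that the install landed before launching a run. A
    minimal sanity-check sketch (exact version strings will vary; a source
    install from a pinned commit usually reports a .dev version rather than a
    tagged release):

    # Confirm axolotl and the flash-attn extra import cleanly
    python -c "import axolotl, flash_attn; print('axolotl OK')"
    # Print the installed transformers version
    python -c "import transformers; print(transformers.__version__)"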
    @@ -846,7 +839,7 @@ gtag('config', 'G-9KYCVJBNMQ', { 'anonymize_ip': true});

    TIPS