Built site for gh-pages
.github/workflows/main.yml (vendored, 64 changed lines)
@@ -15,21 +15,6 @@ jobs:
      fail-fast: false
      matrix:
        include:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.0
            axolotl_extras:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras: vllm
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras:
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
@@ -46,6 +31,11 @@ jobs:
            python_version: "3.11"
            pytorch: 2.9.1
            axolotl_extras:
          - cuda: 130
            cuda_version: 13.0.0
            python_version: "3.11"
            pytorch: 2.9.1
            axolotl_extras:
    runs-on: axolotl-gpu-runner
    steps:
      - name: Checkout
@@ -92,27 +82,6 @@ jobs:
    strategy:
      matrix:
        include:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.0
            axolotl_extras:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras:
            is_latest:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras: vllm
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras:
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
@@ -129,6 +98,11 @@ jobs:
            python_version: "3.11"
            pytorch: 2.9.1
            axolotl_extras:
          - cuda: 130
            cuda_version: 13.0.0
            python_version: "3.11"
            pytorch: 2.9.1
            axolotl_extras:
    runs-on: axolotl-gpu-runner
    steps:
      - name: Checkout
@@ -170,24 +144,18 @@ jobs:
    strategy:
      matrix:
        include:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras:
            is_latest:
          - cuda: 126
            cuda_version: 12.6.3
            python_version: "3.11"
            pytorch: 2.7.1
            axolotl_extras: vllm
            is_latest: true
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
            pytorch: 2.8.0
            axolotl_extras:
            is_latest:
          - cuda: 128
            cuda_version: 12.8.1
            python_version: "3.11"
            pytorch: 2.9.1
            axolotl_extras:
            is_latest:
    runs-on: axolotl-gpu-runner
    steps:
      - name: Checkout

@@ -816,11 +816,8 @@ Important
<div class="code-copy-outer-scaffold"><div class="sourceCode" id="cb2"><pre class="sourceCode bash code-with-copy"><code class="sourceCode bash"><span id="cb2-1"><a href="#cb2-1" aria-hidden="true" tabindex="-1"></a><span class="ex">main-base-py{python_version}-cu{cuda_version}-{pytorch_version}</span></span></code></pre></div><button title="Copy to Clipboard" class="code-copy-button"><i class="bi"></i></button></div>
<p>Tags examples:</p>
<ul>
<li><code>main-base-py3.11-cu128-2.7.1</code></li>
<li><code>main-base-py3.11-cu126-2.7.1</code></li>
<li><code>main-base-py3.11-cu126-2.7.0</code></li>
<li><code>main-base-py3.11-cu126-2.6.0</code></li>
<li><code>main-base-py3.11-cu124-2.6.0</code></li>
<li><code>main-base-py3.11-cu128-2.8.0</code></li>
<li><code>main-base-py3.11-cu128-2.9.1</code></li>
</ul>
</section>
</section>
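As a usage illustration for the base-image tags listed above (a hedged sketch; confirm the exact tag exists on Docker Hub before relying on it):

# example tag taken from the updated list above; availability is an assumption
docker pull axolotlai/axolotl-base:main-base-py3.11-cu128-2.9.1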
@@ -860,15 +857,12 @@ Tip
</div>
<p>Tags examples:</p>
<ul>
<li><code>main-py3.11-cu128-2.7.1</code></li>
<li><code>main-py3.11-cu126-2.7.1</code></li>
<li><code>main-py3.11-cu126-2.7.0</code></li>
<li><code>main-py3.11-cu126-2.6.0</code></li>
<li><code>main-py3.11-cu124-2.6.0</code></li>
<li><code>main-py3.11-cu128-2.8.0</code></li>
<li><code>main-py3.11-cu128-2.9.1</code></li>
<li><code>main-latest</code></li>
<li><code>main-20250303-py3.11-cu124-2.6.0</code></li>
<li><code>main-20250303-py3.11-cu126-2.6.0</code></li>
<li><code>0.10.1</code></li>
<li><code>0.12.0</code></li>
</ul>
</section>
</section>
@@ -825,7 +825,7 @@ Important
</div>
</div>
<div class="callout-body-container callout-body">
<p>For Blackwell GPUs, please use Pytorch 2.7.0 and CUDA 12.8.</p>
<p>For Blackwell GPUs, please use Pytorch 2.9.1 and CUDA 12.8.</p>
</div>
</div>
<section id="sec-pypi" class="level3" data-number="2.1">
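A minimal sketch of what the updated callout implies for a local install, assuming PyTorch publishes cu128 wheels for 2.9.1 on its standard wheel index (verify availability at pytorch.org before use):

# assumption: torch 2.9.1 cu128 wheels are available on the official PyTorch index
pip3 install torch==2.9.1 --index-url https://download.pytorch.org/whl/cu128
pip3 install -U packaging setuptools wheel ninja
pip3 install --no-build-isolation 'axolotl[flash-attn,deepspeed]'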
@@ -900,7 +900,7 @@ Important
</div>
</div>
<div class="callout-body-container callout-body">
<p>For Blackwell GPUs, please use <code>axolotlai/axolotl:main-py3.11-cu128-2.7.0</code> or the cloud variant <code>axolotlai/axolotl-cloud:main-py3.11-cu128-2.7.0</code>.</p>
<p>For Blackwell GPUs, please use <code>axolotlai/axolotl:main-py3.11-cu128-2.9.1</code> or the cloud variant <code>axolotlai/axolotl-cloud:main-py3.11-cu128-2.9.1</code>.</p>
</div>
</div>
<p>Please refer to the <a href="../docs/docker.html">Docker documentation</a> for more information on the different Docker images that are available.</p>
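For reference, running the Blackwell-compatible image named in the callout, using the docker run form shown elsewhere in these docs (a hedged example; adjust the tag to the variant you need):

# tag taken from the updated callout above
docker run --gpus '"all"' --rm -it axolotlai/axolotl:main-py3.11-cu128-2.9.1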
@@ -844,7 +844,7 @@ Expand older updates
<ul>
<li>NVIDIA GPU (Ampere or newer for <code>bf16</code> and Flash Attention) or AMD GPU</li>
<li>Python 3.11</li>
<li>PyTorch ≥2.7.1</li>
<li>PyTorch ≥2.8.0</li>
</ul>
<section id="google-colab" class="level3">
<h3 class="anchored" data-anchor-id="google-colab">Google Colab</h3>
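A quick sanity check against the bumped PyTorch floor (a simple sketch, not taken from the original page):

# prints the installed torch version, its CUDA build, and GPU availability
python3 -c "import torch; print(torch.__version__, torch.version.cuda, torch.cuda.is_available())"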
@@ -154,7 +154,7 @@
"href": "docs/installation.html#sec-installation-methods",
"title": "Installation",
"section": "2 Installation Methods",
"text": "2 Installation Methods\n\n\n\n\n\n\nImportant\n\n\n\nPlease make sure to have Pytorch installed before installing Axolotl in your local environment.\nFollow the instructions at: https://pytorch.org/get-started/locally/\n\n\n\n\n\n\n\n\nImportant\n\n\n\nFor Blackwell GPUs, please use Pytorch 2.7.0 and CUDA 12.8.\n\n\n\n2.1 PyPI Installation (Recommended)\npip3 install -U packaging setuptools wheel ninja\npip3 install --no-build-isolation axolotl[flash-attn,deepspeed]\nWe use --no-build-isolation in order to detect the installed PyTorch version (if\ninstalled) in order not to clobber it, and so that we set the correct version of\ndependencies that are specific to the PyTorch version or other installed\nco-dependencies.\n\n\n2.2 uv Installation\nuv is a fast, reliable Python package installer and resolver built in Rust. It offers significant performance improvements over pip and provides better dependency resolution, making it an excellent choice for complex environments.\nInstall uv if not already installed\ncurl -LsSf https://astral.sh/uv/install.sh | sh\nsource $HOME/.local/bin/env\nChoose your CUDA version to use with PyTorch; e.g. cu124, cu126, cu128,\nthen create the venv and activate\nexport UV_TORCH_BACKEND=cu126\nuv venv --no-project --relocatable\nsource .venv/bin/activate\nInstall PyTorch\n- PyTorch 2.6.0 recommended\nuv pip install packaging setuptools wheel\nuv pip install torch==2.6.0\nuv pip install awscli pydantic\nInstall axolotl from PyPi\nuv pip install --no-build-isolation axolotl[deepspeed,flash-attn]\n\n# optionally install with vLLM if you're using torch==2.6.0 and want to train w/ GRPO\nuv pip install --no-build-isolation axolotl[deepspeed,flash-attn,vllm]\n\n\n2.3 Edge/Development Build\nFor the latest features between releases:\ngit clone https://github.com/axolotl-ai-cloud/axolotl.git\ncd axolotl\npip3 install -U packaging setuptools wheel ninja\npip3 install --no-build-isolation -e '.[flash-attn,deepspeed]'\n\n\n2.4 Docker\ndocker run --gpus '\"all\"' --rm -it axolotlai/axolotl:main-latest\nFor development with Docker:\ndocker compose up -d\n\n\n\n\n\n\nTipAdvanced Docker Configuration\n\n\n\ndocker run --privileged --gpus '\"all\"' --shm-size 10g --rm -it \\\n --name axolotl --ipc=host \\\n --ulimit memlock=-1 --ulimit stack=67108864 \\\n --mount type=bind,src=\"${PWD}\",target=/workspace/axolotl \\\n -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \\\n axolotlai/axolotl:main-latest\n\n\n\n\n\n\n\n\nImportant\n\n\n\nFor Blackwell GPUs, please use axolotlai/axolotl:main-py3.11-cu128-2.7.0 or the cloud variant axolotlai/axolotl-cloud:main-py3.11-cu128-2.7.0.\n\n\nPlease refer to the Docker documentation for more information on the different Docker images that are available.",
"text": "2 Installation Methods\n\n\n\n\n\n\nImportant\n\n\n\nPlease make sure to have Pytorch installed before installing Axolotl in your local environment.\nFollow the instructions at: https://pytorch.org/get-started/locally/\n\n\n\n\n\n\n\n\nImportant\n\n\n\nFor Blackwell GPUs, please use Pytorch 2.9.1 and CUDA 12.8.\n\n\n\n2.1 PyPI Installation (Recommended)\npip3 install -U packaging setuptools wheel ninja\npip3 install --no-build-isolation axolotl[flash-attn,deepspeed]\nWe use --no-build-isolation in order to detect the installed PyTorch version (if\ninstalled) in order not to clobber it, and so that we set the correct version of\ndependencies that are specific to the PyTorch version or other installed\nco-dependencies.\n\n\n2.2 uv Installation\nuv is a fast, reliable Python package installer and resolver built in Rust. It offers significant performance improvements over pip and provides better dependency resolution, making it an excellent choice for complex environments.\nInstall uv if not already installed\ncurl -LsSf https://astral.sh/uv/install.sh | sh\nsource $HOME/.local/bin/env\nChoose your CUDA version to use with PyTorch; e.g. cu124, cu126, cu128,\nthen create the venv and activate\nexport UV_TORCH_BACKEND=cu126\nuv venv --no-project --relocatable\nsource .venv/bin/activate\nInstall PyTorch\n- PyTorch 2.6.0 recommended\nuv pip install packaging setuptools wheel\nuv pip install torch==2.6.0\nuv pip install awscli pydantic\nInstall axolotl from PyPi\nuv pip install --no-build-isolation axolotl[deepspeed,flash-attn]\n\n# optionally install with vLLM if you're using torch==2.6.0 and want to train w/ GRPO\nuv pip install --no-build-isolation axolotl[deepspeed,flash-attn,vllm]\n\n\n2.3 Edge/Development Build\nFor the latest features between releases:\ngit clone https://github.com/axolotl-ai-cloud/axolotl.git\ncd axolotl\npip3 install -U packaging setuptools wheel ninja\npip3 install --no-build-isolation -e '.[flash-attn,deepspeed]'\n\n\n2.4 Docker\ndocker run --gpus '\"all\"' --rm -it axolotlai/axolotl:main-latest\nFor development with Docker:\ndocker compose up -d\n\n\n\n\n\n\nTipAdvanced Docker Configuration\n\n\n\ndocker run --privileged --gpus '\"all\"' --shm-size 10g --rm -it \\\n --name axolotl --ipc=host \\\n --ulimit memlock=-1 --ulimit stack=67108864 \\\n --mount type=bind,src=\"${PWD}\",target=/workspace/axolotl \\\n -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \\\n axolotlai/axolotl:main-latest\n\n\n\n\n\n\n\n\nImportant\n\n\n\nFor Blackwell GPUs, please use axolotlai/axolotl:main-py3.11-cu128-2.9.1 or the cloud variant axolotlai/axolotl-cloud:main-py3.11-cu128-2.9.1.\n\n\nPlease refer to the Docker documentation for more information on the different Docker images that are available.",
"crumbs": [
"Getting Started",
"Installation"
@@ -3126,7 +3126,7 @@
"href": "index.html#quick-start---llm-fine-tuning-in-minutes",
"title": "Axolotl",
"section": "🚀 Quick Start - LLM Fine-tuning in Minutes",
"text": "🚀 Quick Start - LLM Fine-tuning in Minutes\nRequirements:\n\nNVIDIA GPU (Ampere or newer for bf16 and Flash Attention) or AMD GPU\nPython 3.11\nPyTorch ≥2.7.1\n\n\nGoogle Colab\n\n\n\nOpen In Colab\n\n\n\n\nInstallation\n\nUsing pip\npip3 install -U packaging==23.2 setuptools==75.8.0 wheel ninja\npip3 install --no-build-isolation axolotl[flash-attn,deepspeed]\n\n# Download example axolotl configs, deepspeed configs\naxolotl fetch examples\naxolotl fetch deepspeed_configs # OPTIONAL\n\n\nUsing Docker\nInstalling with Docker can be less error prone than installing in your own environment.\ndocker run --gpus '\"all\"' --rm -it axolotlai/axolotl:main-latest\nOther installation approaches are described here.\n\n\nCloud Providers\n\n\nRunPod\nVast.ai\nPRIME Intellect\nModal\nNovita\nJarvisLabs.ai\nLatitude.sh\n\n\n\n\n\nYour First Fine-tune\n# Fetch axolotl examples\naxolotl fetch examples\n\n# Or, specify a custom path\naxolotl fetch examples --dest path/to/folder\n\n# Train a model using LoRA\naxolotl train examples/llama-3/lora-1b.yml\nThat’s it! Check out our Getting Started Guide for a more detailed walkthrough.",
"text": "🚀 Quick Start - LLM Fine-tuning in Minutes\nRequirements:\n\nNVIDIA GPU (Ampere or newer for bf16 and Flash Attention) or AMD GPU\nPython 3.11\nPyTorch ≥2.8.0\n\n\nGoogle Colab\n\n\n\nOpen In Colab\n\n\n\n\nInstallation\n\nUsing pip\npip3 install -U packaging==23.2 setuptools==75.8.0 wheel ninja\npip3 install --no-build-isolation axolotl[flash-attn,deepspeed]\n\n# Download example axolotl configs, deepspeed configs\naxolotl fetch examples\naxolotl fetch deepspeed_configs # OPTIONAL\n\n\nUsing Docker\nInstalling with Docker can be less error prone than installing in your own environment.\ndocker run --gpus '\"all\"' --rm -it axolotlai/axolotl:main-latest\nOther installation approaches are described here.\n\n\nCloud Providers\n\n\nRunPod\nVast.ai\nPRIME Intellect\nModal\nNovita\nJarvisLabs.ai\nLatitude.sh\n\n\n\n\n\nYour First Fine-tune\n# Fetch axolotl examples\naxolotl fetch examples\n\n# Or, specify a custom path\naxolotl fetch examples --dest path/to/folder\n\n# Train a model using LoRA\naxolotl train examples/llama-3/lora-1b.yml\nThat’s it! Check out our Getting Started Guide for a more detailed walkthrough.",
"crumbs": [
"Home"
]
@@ -5705,7 +5705,7 @@
"href": "docs/docker.html#base",
"title": "Docker",
"section": "Base",
"text": "Base\nThe base image is the most minimal image that can install Axolotl. It is based on the nvidia/cuda image. It includes python, torch, git, git-lfs, awscli, pydantic, and more.\n\nImage\naxolotlai/axolotl-base\nLink: Docker Hub\n\n\nTags format\nmain-base-py{python_version}-cu{cuda_version}-{pytorch_version}\nTags examples:\n\nmain-base-py3.11-cu128-2.7.1\nmain-base-py3.11-cu126-2.7.1\nmain-base-py3.11-cu126-2.7.0\nmain-base-py3.11-cu126-2.6.0\nmain-base-py3.11-cu124-2.6.0",
"text": "Base\nThe base image is the most minimal image that can install Axolotl. It is based on the nvidia/cuda image. It includes python, torch, git, git-lfs, awscli, pydantic, and more.\n\nImage\naxolotlai/axolotl-base\nLink: Docker Hub\n\n\nTags format\nmain-base-py{python_version}-cu{cuda_version}-{pytorch_version}\nTags examples:\n\nmain-base-py3.11-cu128-2.8.0\nmain-base-py3.11-cu128-2.9.1",
"crumbs": [
"Deployments",
"Docker"
@@ -5716,7 +5716,7 @@
"href": "docs/docker.html#main",
"title": "Docker",
"section": "Main",
"text": "Main\nThe main image is the image that is used to run Axolotl. It is based on the axolotlai/axolotl-base image and includes the Axolotl codebase, dependencies, and more.\n\nImage\naxolotlai/axolotl\nLink: Docker Hub\n\n\nTags format\n# on push to main\nmain-py{python_version}-cu{cuda_version}-{pytorch_version}\n\n# latest main (currently torch 2.6.0, python 3.11, cuda 12.4)\nmain-latest\n\n# nightly build\n{branch}-{date_in_YYYYMMDD}-py{python_version}-cu{cuda_version}-{pytorch_version}\n\n# tagged release\n{version}\n\n\n\n\n\n\nTip\n\n\n\nThere may be some extra tags appended to the image, like -vllm which installs those packages.\n\n\nTags examples:\n\nmain-py3.11-cu128-2.7.1\nmain-py3.11-cu126-2.7.1\nmain-py3.11-cu126-2.7.0\nmain-py3.11-cu126-2.6.0\nmain-py3.11-cu124-2.6.0\nmain-latest\nmain-20250303-py3.11-cu124-2.6.0\nmain-20250303-py3.11-cu126-2.6.0\n0.10.1",
"text": "Main\nThe main image is the image that is used to run Axolotl. It is based on the axolotlai/axolotl-base image and includes the Axolotl codebase, dependencies, and more.\n\nImage\naxolotlai/axolotl\nLink: Docker Hub\n\n\nTags format\n# on push to main\nmain-py{python_version}-cu{cuda_version}-{pytorch_version}\n\n# latest main (currently torch 2.6.0, python 3.11, cuda 12.4)\nmain-latest\n\n# nightly build\n{branch}-{date_in_YYYYMMDD}-py{python_version}-cu{cuda_version}-{pytorch_version}\n\n# tagged release\n{version}\n\n\n\n\n\n\nTip\n\n\n\nThere may be some extra tags appended to the image, like -vllm which installs those packages.\n\n\nTags examples:\n\nmain-py3.11-cu128-2.8.0\nmain-py3.11-cu128-2.9.1\nmain-latest\nmain-20250303-py3.11-cu124-2.6.0\nmain-20250303-py3.11-cu126-2.6.0\n0.12.0",
"crumbs": [
"Deployments",
"Docker"
sitemap.xml (470 changed lines): file diff suppressed because it is too large.