coderabbit improvements

This commit is contained in:
Dan Saunders
2025-09-11 12:11:00 -04:00
parent 4c81172917
commit 37d07bd7f7
8 changed files with 31 additions and 28 deletions

View File

@@ -29,14 +29,16 @@ PRs are **greatly welcome**!
2. Set up the development environment by following the instructions in the [README.md](https://github.com/axolotl-ai-cloud/axolotl/tree/main/README.md) file.
3. Explore the codebase, run tests, and verify that everything works as expected.
Please run below to setup env
Please run the below to set up:
```bash
uv pip install -e .[dev]
git clone https://github.com/axolotl-ai-cloud/axolotl.git
cd axolotl
pre-commit install
uv sync # TODO(djsaunde): extras installation details
uv run pre-commit install
# test
pytest tests/
pytest tests/ # optional
```
## How to Contribute

View File

@@ -50,7 +50,7 @@ jobs:
- name: Extract tag name
id: tag
run: echo ::set-output name=TAG_NAME::$(echo $GITHUB_REF | cut -d / -f 3)
run: echo "TAG_NAME=$(echo "$GITHUB_REF" | cut -d / -f 3)" >> "$GITHUB_OUTPUT"
- name: Build package
run: |

2
.gitignore vendored
View File

@@ -191,5 +191,5 @@ out/
# vim
*.swp
# scm auto-versioning
# setuptools-scm generated version file
src/axolotl/_version.py

View File

@@ -2,4 +2,4 @@ include pyproject.toml
include README.md
include LICENSE
include src/axolotl/utils/chat_templates/templates/*.jinja
recursive-include axolotl *.py
recursive-include src/axolotl *.py

View File

@@ -65,14 +65,9 @@ Features:
- **Flexible Dataset Handling**: Load from local, HuggingFace, and cloud (S3, Azure, GCP, OCI) datasets.
- **Cloud Ready**: We ship [Docker images](https://hub.docker.com/u/axolotlai) and also [PyPI packages](https://pypi.org/project/axolotl/) for use on cloud platforms and local hardware.
## 🚀 Quick Start - LLM Fine-tuning in Minutes
**Requirements**:
- NVIDIA GPU (Ampere or newer for `bf16` and Flash Attention) or AMD GPU
- Python 3.11+
**Requirements**: NVIDIA GPU (Ampere+) or AMD GPU, Python 3.11+
### Google Colab
@@ -86,12 +81,17 @@ Features:
# Install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
# Add axolotl to your project
# One-off usage
uvx axolotl fetch examples
# Or, in your project
uv init my-project && cd my-project
uv add axolotl
source .venv/bin/activate
# Download example axolotl configs, deepspeed configs
axolotl fetch examples
axolotl fetch deepspeed_configs # OPTIONAL
axolotl fetch deepspeed_configs # optional
```
#### Using pip
@@ -102,7 +102,7 @@ pip3 install --no-build-isolation axolotl
# Download example axolotl configs, deepspeed configs
axolotl fetch examples
axolotl fetch deepspeed_configs # OPTIONAL
axolotl fetch deepspeed_configs # optional
```
#### Using Docker

View File

@@ -35,16 +35,16 @@ RUN if [ "$NIGHTLY_BUILD" = "true" ] ; then \
RUN uv pip install packaging==23.2 setuptools==75.8.0
RUN if [ "$AXOLOTL_EXTRAS" != "" ] ; then \
uv pip install --no-build-isolation -e .[ring-flash-attn,optimizers,ray,$AXOLOTL_EXTRAS] $AXOLOTL_ARGS; \
uv sync --frozen -E ring-flash-attn -E optimizers -E ray $(printf ' -E %s' "${AXOLOTL_EXTRAS//,/ }") $AXOLOTL_ARGS; \
else \
uv pip install --no-build-isolation -e .[ring-flash-attn,optimizers,ray] $AXOLOTL_ARGS; \
uv sync --frozen -E ring-flash-attn -E optimizers -E ray $AXOLOTL_ARGS; \
fi
RUN python scripts/unsloth_install.py | sh
RUN python scripts/cutcrossentropy_install.py | sh
# So we can test the Docker image
RUN uv pip install -e .[dev]
RUN uv pip install -e ".[dev]"
# fix so that git fetch/pull from remote works
RUN git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" && \

View File

@@ -95,10 +95,7 @@ For the latest features between releases:
```{.bash}
git clone https://github.com/axolotl-ai-cloud/axolotl.git
cd axolotl
# Install uv if not already installed
curl -LsSf https://astral.sh/uv/install.sh | sh
# Create and sync environment
uv venv
curl -LsSf https://astral.sh/uv/install.sh | sh # If not already installed
uv sync
```

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "axolotl"
version = "0.13.0.dev"
dynamic = ["version"]
description = "LLM Trainer"
readme = "README.md"
requires-python = ">=3.10"
@@ -27,7 +27,7 @@ classifiers = [
dependencies = [
# Core dependencies
"torch>=2.6.0",
"packaging==23.2",
"packaging>=23.2",
"huggingface_hub>=0.33.0",
"peft==0.17.0",
"transformers==4.55.2",
@@ -260,7 +260,6 @@ dev-dependencies = [
"ruff",
"mypy",
]
find-links = [
"https://github.com/Dao-AILab/flash-attention/releases/expanded_assets/v2.8.3",
]
@@ -273,7 +272,12 @@ url = "https://huggingface.github.io/autogptq-index/whl/"
# Build dependencies for packages that don't declare them properly
[tool.uv.extra-build-dependencies]
mamba-ssm = ["torch", "causal_conv1d"]
flash-attn = ["torch", "packaging", "wheel", "setuptools"]
flash-attn = [
"packaging",
"wheel",
"setuptools",
{ requirement = "torch", match-runtime = true },
]
autoawq = ["torch"]
triton = ["torch"]
bitsandbytes = ["torch"]