# pyproject.toml — axolotl packaging metadata and tool configuration.
# (A pasted changelog / page-metadata header from the extraction source was removed;
# it was not part of the TOML file.)
# Build backend: setuptools. setuptools_scm is available at build time,
# but the package version is read from the VERSION file (see
# [tool.setuptools.dynamic] below).
[build-system]
requires = ["setuptools>=64", "wheel", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"
[project]
name = "axolotl"
# Version is dynamic — resolved from the VERSION file via [tool.setuptools.dynamic].
dynamic = ["version"]
description = "LLM Trainer"
readme = "README.md"
requires-python = ">=3.10"
# license = "Apache-2.0"
# Runtime dependencies (PEP 508 specifiers). Environment markers gate
# packages that do not ship wheels for macOS or aarch64.
dependencies = [
    # Core ML stack
    "torch>=2.6.0",
    "packaging==26.0",
    "huggingface_hub>=1.1.7",
    "peft>=0.19.1,<0.20.0",
    "tokenizers>=0.22.1",
    "transformers==5.5.4",
    "accelerate==1.13.0",
    "datasets>=4.8.4,<4.9.0",
    "trl==1.1.0",
    "hf_xet==1.4.3",
    "kernels==0.13.0",
    "trackio>=0.16.1",
    "typing-extensions>=4.15.0",
    "optimum==1.16.2",
    "hf_transfer",
    "sentencepiece",
    "gradio>=6.2.0,<7.0",
    "modal==1.3.0.post1",
    "pydantic>=2.10.6",
    "addict",
    "fire",
    "PyYAML>=6.0",
    "requests",
    "wandb",
    "einops",
    "colorama",
    "numba>=0.61.2",
    "numpy>=2.2.6",

    # Evaluation & metrics
    "evaluate==0.4.1",
    "scipy",
    "nvidia-ml-py==12.560.30",
    "art",
    "tensorboard",
    "python-dotenv==1.0.1",

    # Remote filesystems
    "s3fs>=2024.5.0",
    "gcsfs>=2025.3.0",
    "adlfs>=2024.5.0",
    "ocifs==1.3.2",

    "zstandard==0.22.0",
    "fastcore",

    # lm eval harness
    "lm_eval==0.4.11",
    "langdetect==1.0.9",
    "immutabledict==4.2.0",
    "antlr4-python3-runtime==4.13.2",

    "schedulefree==1.4.1",
    "openenv-core==0.1.0",

    # Axolotl contribs
    "axolotl-contribs-lgpl==0.0.7",
    "axolotl-contribs-mit==0.0.6",

    # Telemetry
    "posthog==6.7.11",

    "mistral-common==1.11.0",

    # Platform-specific (Linux only)
    "bitsandbytes==0.49.1 ; sys_platform != 'darwin'",
    "triton>=3.4.0 ; sys_platform != 'darwin'",
    "xformers>=0.0.23.post1 ; sys_platform != 'darwin'",
    "liger-kernel==0.7.0 ; sys_platform != 'darwin'",
    "torchao==0.17.0 ; sys_platform != 'darwin' and platform_machine != 'aarch64'",

    # Architecture-specific
    "fla-core==0.4.1 ; platform_machine != 'aarch64'",
    "flash-linear-attention==0.4.1 ; platform_machine != 'aarch64'",
]
# Optional extras. Several of these (flash-attn, mamba-ssm, auto-gptq, ...)
# compile against a specific torch build; see [tool.uv] conflicts and
# [tool.uv.extra-build-dependencies] below for how uv handles them.
[project.optional-dependencies]
flash-attn = ["flash-attn==2.8.3"]
ring-flash-attn = [
    "flash-attn==2.8.3",
    "ring-flash-attn>=0.1.7",
]
deepspeed = [
    "deepspeed>=0.18.6,<0.19.0",
    "deepspeed-kernels",
]
mamba-ssm = [
    "mamba-ssm==1.2.0.post1",
    "causal_conv1d",
]
auto-gptq = ["auto-gptq==0.5.1"]
mlflow = ["mlflow"]
galore = ["galore_torch"]
apollo = ["apollo-torch"]
optimizers = [
    "galore_torch",
    "apollo-torch",
    "lomo-optim==0.1.1",
    "torch-optimi==0.2.1",
    "came_pytorch==0.1.3",
]
ray = ["ray[train]>=2.52.1"]
vllm = ["vllm>=0.15.0"]
llmcompressor = ["llmcompressor>=0.10.0"]
fbgemm-gpu = ["fbgemm-gpu-genai>=1.3.0"]
opentelemetry = [
    "opentelemetry-api",
    "opentelemetry-sdk",
    "opentelemetry-exporter-prometheus",
    "prometheus-client",
]
# PEP 735 dependency groups for local development and CI; installed with
# `uv sync` / `--group`, not published as package extras.
[dependency-groups]
dev = [
    "black",
    "mypy",
    "pre-commit",
    "types-requests",
    "quartodoc",
    "jupyter",
    "blobfile",
    "tiktoken",
]
test = [
    "codecov",
    "codecov-cli",
    "pytest",
    "pytest-cov",
    "pytest-retry",
    "pytest-sugar",
    "pytest-xdist",
    "tbparse",
]
# Console entry point: the `axolotl` CLI.
[project.scripts]
axolotl = "axolotl.cli.main:main"

[project.urls]
Homepage = "https://axolotl.ai/"
Documentation = "https://docs.axolotl.ai/"
Repository = "https://github.com/axolotl-ai-cloud/axolotl.git"
[tool.setuptools]
include-package-data = true

# src/ layout: packages live under src/.
[tool.setuptools.packages.find]
where = ["src"]

# The dynamic [project] version is read from the VERSION file.
[tool.setuptools.dynamic]
version = { file = "VERSION" }
[tool.ruff]
line-length = 88
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "W", "C90", "B", "I"]
ignore = [
    "E203", # Whitespace before ':'
    "E501", # Line too long
    "C901", # Too complex
    "B019", # Use of functools.cache on methods
    "E722", # Bare except
    "F821", # Undefined name (for dynamic exec)
]

[tool.ruff.lint.isort]
known-third-party = ["wandb", "comet_ml"]
known-local-folder = ["src", "tests"]
# Black-compatible isort settings
force-single-line = false
combine-as-imports = true
split-on-trailing-comma = true

[tool.ruff.format]
# Use black's formatting style exactly
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
docstring-code-format = false
# Default pytest invocation skips tests marked "slow".
[tool.pytest.ini_options]
addopts = "-m 'not slow'"
markers = [
    "slow: marks tests as slow",
]
# UV specific configuration
[tool.uv]
prerelease = "allow"
# Each inner list is a set of mutually conflicting requirements: the base
# package and the named extra are not resolved together in one lockfile
# environment (these extras pin against specific torch/CUDA builds).
conflicts = [
    [{ package = "axolotl" }, { extra = "vllm" }],
    [{ package = "axolotl" }, { extra = "flash-attn" }],
    [{ package = "axolotl" }, { extra = "ring-flash-attn" }],
    [{ package = "axolotl" }, { extra = "mamba-ssm" }],
    [{ package = "axolotl" }, { extra = "auto-gptq" }],
    [{ package = "axolotl" }, { extra = "fbgemm-gpu" }],
    [{ package = "axolotl" }, { extra = "llmcompressor" }],
]

# Build these sdists against the torch version installed at runtime
# (match-runtime) instead of an isolated build-time torch.
[tool.uv.extra-build-dependencies]
mamba-ssm = [{ requirement = "torch", match-runtime = true }]
causal-conv1d = [{ requirement = "torch", match-runtime = true }]
flash-attn = [{ requirement = "torch", match-runtime = true }]
deepspeed = [{ requirement = "torch", match-runtime = true }]
auto-gptq = [{ requirement = "torch", match-runtime = true }]