[build-system]
requires = ["setuptools>=64", "wheel", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"
[project]
name = "axolotl"
# NOTE(review): a static `version` here alongside [tool.setuptools_scm] below is
# contradictory — setuptools_scm normally requires `dynamic = ["version"]` and no
# static version key. Confirm which versioning source is intended.
version = "0.13.0.dev"
description = "LLM Trainer"
readme = "README.md"
requires-python = ">=3.10"
license = {text = "Apache-2.0"}
authors = [
    {name = "Axolotl AI"},
]
maintainers = [
    {name = "Axolotl AI"},
]
classifiers = [
    "Development Status :: 4 - Beta",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]

dependencies = [
    # Core dependencies
    "torch>=2.6.0",
    "packaging==23.2",
    "huggingface_hub>=0.33.0",
    "peft==0.17.0",
    "transformers==4.55.2",
    "tokenizers>=0.21.1",
    "accelerate==1.10.0",
    "datasets==4.0.0",
    "trl==0.21.0",
    "hf_xet==1.1.5",
    "kernels==0.9.0",
    "trackio",

    # Optimization and training
    "optimum==1.16.2",
    "hf_transfer",
    "sentencepiece",
    "gradio==5.41.1",

    # Infrastructure
    "modal==1.0.2",
    "pydantic==2.10.6",
    "addict",
    "fire",
    "PyYAML>=6.0",
    "requests",
    "wandb",
    "einops",
    "colorama",
    "numba",
    "numpy>=1.24.4,<3.0",

    # QLora dependencies
    "evaluate==0.4.1",
    "scipy",
    "scikit-learn>=1.7.0",
    "nvidia-ml-py==12.560.30",
    "art",
    "tensorboard",
    "python-dotenv==1.0.1",

    # Remote filesystems
    "s3fs>=2024.5.0",
    "gcsfs>=2024.5.0",
    "adlfs>=2024.5.0",
    "ocifs==1.3.2",

    # Other utilities
    "zstandard>=0.23.0",
    "fastcore",

    # LM eval harness
    "lm_eval==0.4.7",
    "langdetect==1.0.9",
    "immutabledict==4.2.0",
    "antlr4-python3-runtime==4.13.2",

    # Training optimizers
    "schedulefree==1.4.1",

    # Axolotl contribs
    "axolotl-contribs-lgpl @ git+https://github.com/axolotl-ai-cloud/axolotl-contribs-lgpl.git@numpy",
    "axolotl-contribs-mit==0.0.5",

    # Mistral
    "mistral-common==1.8.3",

    # Platform-specific dependencies (Linux by default, excluded on macOS)
    "triton>=3.0.0,<3.4.0 ; sys_platform != 'darwin'",
    "xformers>=0.0.23.post1 ; sys_platform != 'darwin'",
    "autoawq==0.2.7.post3 ; sys_platform != 'darwin'",
    "liger-kernel==0.6.1 ; sys_platform != 'darwin'",
    "torchao==0.12.0 ; sys_platform != 'darwin'",
    "bitsandbytes==0.47.0 ; sys_platform != 'darwin'",
    "flash-attn==2.8.2 ; sys_platform == 'linux'",
]
[project.optional-dependencies]
# Specific hardware acceleration
ring-flash-attn = [
    "ring-flash-attn>=0.1.7",
    "yunchang==0.6.0",
]

# Deep learning frameworks
deepspeed = [
    "deepspeed>=0.17.5",
    "deepspeed-kernels",
]

mamba-ssm = [
    "mamba-ssm>=2.2.0", # Updated to latest stable version
    "causal_conv1d>=1.4.0",
]

# Quantization
auto-gptq = [
    "auto-gptq==0.5.1",
]

# Experiment tracking
mlflow = [
    "mlflow",
]

# Optimizers
galore = [
    "galore_torch",
]

apollo = [
    "apollo-torch",
]

optimizers = [
    "galore_torch",
    "apollo-torch",
    "lomo-optim==0.1.1",
    "torch-optimi==0.2.1",
    "came_pytorch==0.1.3",
]

# Distributed training
ray = [
    "ray[train]",
]

# Inference (Note: vllm and llmcompressor have conflicting dependencies, install separately)
vllm = [
    "vllm>=0.10.0",
]

llmcompressor = [
    "llmcompressor>=0.5.1",
]

# Development dependencies
dev = [
    "pytest",
    "pytest-cov",
    "pytest-xdist",
    "pre-commit",
    "ruff",
    "mypy",
]
[project.scripts]
axolotl = "axolotl.cli.main:main"

[project.urls]
Homepage = "https://axolotl.ai/"
Documentation = "https://docs.axolotl.ai/"
Repository = "https://github.com/axolotl-ai-cloud/axolotl.git"
Issues = "https://github.com/axolotl-ai-cloud/axolotl/issues"
[tool.setuptools]
package-dir = {"" = "src"}
include-package-data = true

[tool.setuptools.packages.find]
where = ["src"]

[tool.setuptools.package-data]
"*" = ["*.yaml", "*.yml", "*.json"]

# NOTE(review): setuptools_scm derives the version from git metadata; it normally
# requires `dynamic = ["version"]` in [project] instead of a static `version` key.
# Confirm the two sections are not in conflict.
[tool.setuptools_scm]
write_to = "src/axolotl/_version.py"
[tool.ruff]
line-length = 88
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "W", "C90", "B"]
ignore = [
    "E203", # Whitespace before ':'
    "E501", # Line too long
    "C901", # Too complex
    "B019", # Use of functools.cache on methods
    "E722", # Bare except
    "F821", # Undefined name (for dynamic exec)
]

# NOTE(review): these isort settings have no effect unless "I" is added to
# lint.select above — confirm whether import sorting is meant to be enforced.
[tool.ruff.lint.isort]
known-third-party = ["wandb", "comet_ml"]
known-local-folder = ["src", "tests"]
# Black-compatible isort settings
force-single-line = false
combine-as-imports = true
split-on-trailing-comma = true

[tool.ruff.format]
# Use black's formatting style exactly
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
docstring-code-format = false
[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
ignore_missing_imports = true
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py", "*_test.py"]
addopts = "-v --tb=short"
# UV specific configuration
[tool.uv]
dev-dependencies = [
    "pytest",
    "pytest-cov",
    "pytest-xdist",
    "pre-commit",
    "ruff",
    "mypy",
]

# Prebuilt flash-attn wheels (avoids long source builds); keep the release tag
# in sync with the flash-attn pin in [project].dependencies.
find-links = [
    "https://github.com/Dao-AILab/flash-attention/releases/expanded_assets/v2.8.2",
]
# UV custom index for specific packages
[[tool.uv.index]]
name = "autogptq"
url = "https://huggingface.github.io/autogptq-index/whl/"

# Build dependencies for packages that don't declare them properly
[tool.uv.extra-build-dependencies]
mamba-ssm = ["torch", "causal_conv1d"]
flash-attn = ["torch", "packaging", "wheel", "setuptools"]
autoawq = ["torch"]
triton = ["torch"]
bitsandbytes = ["torch"]
grpclib = ["wheel"]