# Change history (from the squashed commit message):
# - auto gptq support; more tweaks and add yml; remove old gptq docker
# - don't need explicit peft install for tests
# - fix setup.py to use extra-index-url to install torch for tests;
#   fix cuda version for autogptq index; set torch in requirements so that
#   it installs properly; move gptq install around to work with github cicd
# - gptq doesn't play well with sample packing
# - address pr feedback; remove torch install for now
# - set quantization_config from model config;
#   fix the implementation for getting quant config from model config
---
# CI workflow: builds and pushes the axolotl Docker images to Docker Hub.
# Two sequential jobs: the base `axolotl` image, then the `axolotl-runpod`
# image layered on top of it (hence `needs: build-axolotl`).
name: ci-cd

on:
  push:
    branches:
      - "main"

jobs:
  build-axolotl:
    # Guard so forks don't attempt pushes with missing Docker Hub secrets.
    if: github.repository_owner == 'OpenAccess-AI-Collective'
    # this job needs to be run on self-hosted GPU runners...
    strategy:
      fail-fast: false
      matrix:
        include:
          - cuda: 118
            cuda_version: 11.8.0
            # python_version quoted: 3.10 would otherwise parse as float 3.1
            python_version: "3.9"
            pytorch: 2.0.1
            axolotl_extras:
          - cuda: 118
            cuda_version: 11.8.0
            python_version: "3.10"
            pytorch: 2.0.1
            axolotl_extras:
    runs-on: self-hosted
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: winglian/axolotl
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build
        uses: docker/build-push-action@v4
        with:
          context: .
          build-args: |
            BASE_TAG=${{ github.ref_name }}-base-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }}
            CUDA=${{ matrix.cuda }}
          file: ./docker/Dockerfile
          # Push only for non-PR events (belt-and-braces; workflow only
          # triggers on push to main anyway).
          push: ${{ github.event_name != 'pull_request' }}
          # Tag suffix appends "-<extras>" only when axolotl_extras is set.
          tags: ${{ steps.metadata.outputs.tags }}-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
          labels: ${{ steps.metadata.outputs.labels }}
  build-axolotl-runpod:
    # Depends on the base image built by the previous job.
    needs: build-axolotl
    if: github.repository_owner == 'OpenAccess-AI-Collective'
    # this job needs to be run on self-hosted GPU runners...
    strategy:
      matrix:
        include:
          - cuda: 118
            cuda_version: 11.8.0
            python_version: "3.9"
            pytorch: 2.0.1
            axolotl_extras:
          - cuda: 118
            cuda_version: 11.8.0
            python_version: "3.10"
            pytorch: 2.0.1
            axolotl_extras:
            # Only this matrix entry also gets the "-latest" tag below.
            is_latest: true
    runs-on: self-hosted
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: winglian/axolotl-runpod
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build
        uses: docker/build-push-action@v4
        with:
          context: .
          # BASE_TAG points at the image produced by build-axolotl (no
          # "-base" segment here, unlike the first job).
          build-args: |
            BASE_TAG=${{ github.ref_name }}-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
            CUDA=${{ matrix.cuda }}
          file: ./docker/Dockerfile-runpod
          push: ${{ github.event_name != 'pull_request' }}
          tags: |
            ${{ steps.metadata.outputs.tags }}-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
            ${{ (matrix.is_latest) && format('{0}-latest', steps.metadata.outputs.tags) || '' }}
          labels: ${{ steps.metadata.outputs.labels }}