don't build images with cuda 130 since we don't have flash attention wheels (#3341)

This commit is contained in:
Wing Lian
2026-01-03 18:08:28 -05:00
committed by GitHub
parent afe18ace35
commit b26ba3a5cb
2 changed files with 11 additions and 11 deletions

View File

@@ -31,11 +31,11 @@ jobs:
python_version: "3.11"
pytorch: 2.9.1
axolotl_extras:
- cuda: 130
cuda_version: 13.0.0
python_version: "3.11"
pytorch: 2.9.1
axolotl_extras:
# - cuda: 130
# cuda_version: 13.0.0
# python_version: "3.11"
# pytorch: 2.9.1
# axolotl_extras:
runs-on: axolotl-gpu-runner
steps:
- name: Checkout
@@ -98,11 +98,11 @@ jobs:
python_version: "3.11"
pytorch: 2.9.1
axolotl_extras:
- cuda: 130
cuda_version: 13.0.0
python_version: "3.11"
pytorch: 2.9.1
axolotl_extras:
# - cuda: 130
# cuda_version: 13.0.0
# python_version: "3.11"
# pytorch: 2.9.1
# axolotl_extras:
runs-on: axolotl-gpu-runner
steps:
- name: Checkout