Reduce MAX_JOBS for the FlashAttention-3 (fa3 / hopper) build
This commit is contained in:
@@ -43,7 +43,7 @@ RUN if [ "$TORCH_CUDA_ARCH_LIST" = "9.0+PTX" ] ; then \
|
||||
cd flash-attention; \
|
||||
git checkout v2.7.4.post1; \
|
||||
cd hopper; \
|
||||
FLASH_ATTENTION_DISABLE_SM80=TRUE FLASH_ATTENTION_DISABLE_FP8=TRUE MAX_JOBS=128 python setup.py install; \
|
||||
FLASH_ATTENTION_DISABLE_SM80=TRUE MAX_JOBS=16 python setup.py install; \
|
||||
cd ../..; \
|
||||
elif [ "$PYTORCH_VERSION" = "2.7.0" ] ; then \
|
||||
pip3 install flash-attn==2.7.4.post1; \
|
||||
|
||||
Reference in New Issue
Block a user