From 79daf5b934e43f7086007a1b7c5472cf5ac18120 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Sat, 17 May 2025 06:11:00 -0400 Subject: [PATCH] reduce max jobs for build of fa3, and stop disabling FP8 kernels --- docker/Dockerfile-base | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile-base b/docker/Dockerfile-base index 87fcfcccb..1eb514ba2 100644 --- a/docker/Dockerfile-base +++ b/docker/Dockerfile-base @@ -43,7 +43,7 @@ RUN if [ "$TORCH_CUDA_ARCH_LIST" = "9.0+PTX" ] ; then \ cd flash-attention; \ git checkout v2.7.4.post1; \ cd hopper; \ - FLASH_ATTENTION_DISABLE_SM80=TRUE FLASH_ATTENTION_DISABLE_FP8=TRUE MAX_JOBS=128 python setup.py install; \ + FLASH_ATTENTION_DISABLE_SM80=TRUE MAX_JOBS=16 python setup.py install; \ cd ../..; \ elif [ "$PYTORCH_VERSION" = "2.7.0" ] ; then \ pip3 install flash-attn==2.7.4.post1; \