pin flash attention 2 to the fix for the backward pass

Wing Lian
2023-07-21 08:18:53 -04:00
parent 9b790d359b
commit cdf85fdbd5


@@ -40,7 +40,7 @@ ARG TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 8.6+PTX"
RUN git clone https://github.com/Dao-AILab/flash-attention.git && \
cd flash-attention && \
-git checkout v2.0.0 && \
+git checkout 9ee0ff1 && \
python3 setup.py bdist_wheel && \
cd csrc/fused_dense_lib && \
python3 setup.py bdist_wheel && \
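
Not part of this commit: a minimal sketch of how the wheels produced by these build steps might be installed in a later Dockerfile layer. The dist/ wildcard paths are assumptions based on the default setup.py bdist_wheel output locations, not taken from this diff.

# Sketch only: install the flash-attention and fused_dense_lib wheels built above
RUN cd flash-attention && \
    pip3 install dist/*.whl && \
    pip3 install csrc/fused_dense_lib/dist/*.whl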