File tree Expand file tree Collapse file tree 2 files changed +3
-3
lines changed Expand file tree Collapse file tree 2 files changed +3
-3
lines changed Original file line number Diff line number Diff line change 1
- __version__ = "2.3.0"
1
+ __version__ = "2.3.1"
2
2
3
3
from flash_attn .flash_attn_interface import (
4
4
flash_attn_func ,
Original file line number Diff line number Diff line change @@ -85,11 +85,11 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
85
85
RUN pip install git+https://github.com/mlcommons/
[email protected]
86
86
87
87
# Install FlashAttention
88
- RUN pip install flash-attn==2.3.0
88
+ RUN pip install flash-attn==2.3.1
89
89
90
90
# Install CUDA extensions for fused dense, layer norm
91
91
RUN git clone https://github.com/HazyResearch/flash-attention \
92
- && cd flash-attention && git checkout v2.3.0 \
92
+ && cd flash-attention && git checkout v2.3.1 \
93
93
&& cd csrc/layer_norm && pip install . && cd ../../ \
94
94
&& cd csrc/fused_dense_lib && pip install . && cd ../../ \
95
95
&& cd .. && rm -rf flash-attention
You can’t perform that action at this time.
0 commit comments