Shanshan Wang committed on
Commit
b25f6a7
1 Parent(s): 0500e0e
Files changed (1) hide show
  1. requirements.txt +1 -1
requirements.txt CHANGED
@@ -11,7 +11,7 @@ torch==2.4.0
11
  torchvision==0.19.0
12
  pillow==10.2.0
13
  transformers @ git+https://github.com/huggingface/transformers@673440d073d5f534a6d6bedeeca94869afd8d0a7
14
- flash-attn @ git+https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
15
  peft==0.11.1
16
  absl-py==2.1.0
17
  accelerate==0.30.1
 
11
  torchvision==0.19.0
12
  pillow==10.2.0
13
  transformers @ git+https://github.com/huggingface/transformers@673440d073d5f534a6d6bedeeca94869afd8d0a7
14
+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
15
  peft==0.11.1
16
  absl-py==2.1.0
17
  accelerate==0.30.1