KaiChen1998 committed on
Commit
20afcd2
·
1 Parent(s): 7627835
Files changed (1) hide show
  1. requirements.txt +2 -1
requirements.txt CHANGED
@@ -7,7 +7,8 @@ accelerate==0.33.0
7
  einops==0.6.1
8
  einops-exts==0.0.4
9
  timm==0.6.13
10
- https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.5/flash_attn-2.3.5+cu117torch2.0cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 
11
  scipy
12
  gradio
13
 
 
7
  einops==0.6.1
8
  einops-exts==0.0.4
9
  timm==0.6.13
10
+ flash_attn
11
+ # https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.5/flash_attn-2.3.5+cu117torch2.0cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
12
  scipy
13
  gradio
14