MohamedRashad committed on
Commit f99e530 · 1 Parent(s): dd937a5

chore: Update flash-attn installation command in app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -4,7 +4,7 @@ import torch
 import gradio as gr
 from threading import Thread
 import subprocess
-subprocess.run('pip install -U flash-attn', shell=True)
+subprocess.run('pip install -U flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 models_available = [
     "MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",