michaelmc1618 committed on
Commit
d625ced
·
verified ·
1 Parent(s): fb37d48

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -6
app.py CHANGED
@@ -1,16 +1,28 @@
import os
import subprocess
import sys


def install(package):
    """Install *package* into the running interpreter's environment via pip.

    Uses ``subprocess.check_call`` with an argument list instead of
    ``os.system(f"pip install {package}")``:
    - an argument list is not parsed by a shell, so a package string cannot
      inject shell commands;
    - ``sys.executable -m pip`` guarantees we install into the *same* Python
      that is running this script (a bare ``pip`` on PATH may belong to a
      different interpreter);
    - a non-zero pip exit status raises CalledProcessError instead of being
      silently ignored.
    """
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])


# torch must be installed before flash_attn (flash_attn needs torch at
# build/install time).
install("torch")
install("flash_attn")

# Import heavy third-party libraries only AFTER their dependencies are
# installed — importing them at the top of the file would fail on a fresh
# environment before the install() calls above had a chance to run.
import gradio as gr
from huggingface_hub import InferenceClient
from transformers import AutoModelForCausalLM, pipeline

# Use a pipeline as a high-level helper
pipe = pipeline("visual-question-answering", model="dandelin/vilt-b32-finetuned-vqa", trust_remote_code=True)
 
import os
import subprocess
import sys


def install(package):
    """Install *package* with pip, using the interpreter running this script.

    The argument-list form avoids shell parsing, and a failing pip run
    raises CalledProcessError rather than passing silently.
    """
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])


# Installation order matters: torch first, then flash_attn (which requires
# torch to already be present), then the remaining application dependencies.
_PACKAGES = (
    "torch",
    "flash_attn",
    "huggingface_hub==0.22.2",
    "transformers",
    "openai",
    "gradio",
    "einops",
    "timm",
)

for _pkg in _PACKAGES:
    install(_pkg)

# Third-party imports are deferred until after the installs above so they
# succeed on a freshly provisioned environment.
import gradio as gr
from huggingface_hub import InferenceClient
from transformers import AutoModelForCausalLM, pipeline

# Use a pipeline as a high-level helper
pipe = pipeline("visual-question-answering", model="dandelin/vilt-b32-finetuned-vqa", trust_remote_code=True)