michaelmc1618 committed
Commit fb37d48 · verified · 1 Parent(s): 15bb419

Update app.py

Files changed (1):
  1. app.py +9 -0
app.py CHANGED
@@ -1,7 +1,16 @@
+import os
 import gradio as gr
 from huggingface_hub import InferenceClient
 from transformers import AutoModelForCausalLM, pipeline
 
+# Ensure necessary packages are installed
+def install(package):
+    os.system(f"pip install {package}")
+
+# Install torch and flash_attn
+install("torch")
+install("flash_attn")
+
 
 # Use a pipeline as a high-level helper
 pipe = pipeline("visual-question-answering", model="dandelin/vilt-b32-finetuned-vqa", trust_remote_code=True)
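
Note on the change: the new install() helper shells out to pip with os.system, and installing torch before flash_attn matters because flash_attn's build generally expects torch to already be importable. A minimal alternative sketch (not part of this commit; install_pkg is a hypothetical name) calls pip through the running interpreter with subprocess, so failures raise instead of passing silently and the packages land in the same environment that runs app.py:

import subprocess
import sys

def install_pkg(package):
    # Run "python -m pip install <package>" with the interpreter executing app.py;
    # check_call raises CalledProcessError if pip exits with a non-zero status.
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])

# torch first, then flash_attn, mirroring the order used in the commit.
install_pkg("torch")
install_pkg("flash_attn")

On Hugging Face Spaces the usual route is listing packages in requirements.txt; a runtime install like this is typically a workaround for packages such as flash_attn that need torch already installed at build time.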