Update app.py
app.py CHANGED
@@ -17,7 +17,6 @@ processor = AutoProcessor.from_pretrained("ahmed-masry/ChartInstruct-LLama2")
@spaces.GPU
def predict(image, input_text):
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-    model.to(device)

    input_prompt = f"<image>\n Question: {input_text} Answer: "
    image = image.convert("RGB")
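
The only change in this hunk is dropping model.to(device) from the per-request path. The diff does not show where the model is placed instead; a common pattern for @spaces.GPU (ZeroGPU) Spaces is to move the model to the device once at load time. The sketch below assumes that pattern, and the model class, processor call, and generation settings are likewise assumptions not shown in this diff.

import spaces
import torch
from transformers import AutoProcessor, LlavaForConditionalGeneration

model_name = "ahmed-masry/ChartInstruct-LLama2"
processor = AutoProcessor.from_pretrained(model_name)
# Assumption: a LLaVA-style model class; the actual loading code is not part of this diff.
model = LlavaForConditionalGeneration.from_pretrained(model_name)
# Assumption: the model is moved to the device once at startup instead of inside predict.
model.to("cuda" if torch.cuda.is_available() else "cpu")

@spaces.GPU
def predict(image, input_text):
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    input_prompt = f"<image>\n Question: {input_text} Answer: "
    image = image.convert("RGB")
    # Illustrative preprocessing and generation settings (not shown in the diff).
    inputs = processor(text=input_prompt, images=image, return_tensors="pt").to(device)
    output_ids = model.generate(**inputs, max_new_tokens=100)
    return processor.decode(output_ids[0], skip_special_tokens=True)
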
@@ -39,7 +38,7 @@ def predict(image, input_text):
image = gr.components.Image(type="pil", label="Chart Image")
input_prompt = gr.components.Textbox(label="Input Prompt")
model_output = gr.components.Textbox(label="Model Output")
-examples = [["chart_example_1.png", "
+examples = [["chart_example_1.png", "Describe the trend of the mortality rates for children before age 5"],
            ["chart_example_2.png", "What is the share of respondants who prefer Facebook Messenger in the 30-59 age group?"]]

title = "Interactive Gradio Demo for ChartInstruct-Llama2 model"
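
This hunk completes the first entry of the examples list. The diff does not include the code that ties these components together; the sketch below shows the usual gr.Interface wiring under that assumption. The predict stub stands in for the @spaces.GPU function from the first hunk; everything else mirrors the lines shown above.

import gradio as gr

def predict(image, input_text):
    # Placeholder so the sketch runs standalone; the real predict is the
    # @spaces.GPU function shown in the first hunk.
    return "model output"

image = gr.components.Image(type="pil", label="Chart Image")
input_prompt = gr.components.Textbox(label="Input Prompt")
model_output = gr.components.Textbox(label="Model Output")

examples = [["chart_example_1.png", "Describe the trend of the mortality rates for children before age 5"],
            ["chart_example_2.png", "What is the share of respondants who prefer Facebook Messenger in the 30-59 age group?"]]

title = "Interactive Gradio Demo for ChartInstruct-Llama2 model"

# Assumed assembly: the diff only shows the components, examples, and title.
interface = gr.Interface(fn=predict,
                         inputs=[image, input_prompt],
                         outputs=model_output,
                         examples=examples,
                         title=title)
interface.launch()
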