Spaces:
Running
on
Zero
Running
on
Zero
jiminHuang
committed on
Commit
•
60c2244
1
Parent(s):
3b8079e
Update app.py
Browse files
app.py
CHANGED
@@ -26,7 +26,7 @@ from tqdm import tqdm
|
|
26 |
import json
|
27 |
|
28 |
root_path = os.path.dirname(os.path.abspath(__file__))
|
29 |
-
print(
|
30 |
os.environ['GRADIO_TEMP_DIR'] = root_path
|
31 |
|
32 |
parser = argparse.ArgumentParser()
|
@@ -52,6 +52,7 @@ tokenizer, llava_model, image_processor, context_len = load_pretrained_model(
|
|
52 |
|
53 |
@spaces.GPU
|
54 |
def bot_streaming(message, history):
|
|
|
55 |
print(message)
|
56 |
image_file = None
|
57 |
if message["files"]:
|
@@ -70,7 +71,7 @@ def bot_streaming(message, history):
|
|
70 |
|
71 |
streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
|
72 |
def generate():
|
73 |
-
print('
|
74 |
output = chat_llava(
|
75 |
args=args,
|
76 |
image_file=image_file,
|
@@ -92,6 +93,7 @@ def bot_streaming(message, history):
|
|
92 |
buffer += new_text
|
93 |
generated_text_without_prompt = buffer
|
94 |
time.sleep(0.06)
|
|
|
95 |
yield generated_text_without_prompt
|
96 |
|
97 |
chatbot = gr.Chatbot(scale=1)
|
@@ -104,8 +106,6 @@ with gr.Blocks(fill_height=True) as demo:
|
|
104 |
{"text": "What is in this picture?", "files": ["http://images.cocodataset.org/val2017/000000039769.jpg"]},
|
105 |
{"text": "What is the spending on Healthcare in July? A. 450 B. 600 C. 520 D. 510", "files": ["image_107.png"]},
|
106 |
{"text": "If 2012 net periodic opeb cost increased at the same pace as the pension cost, what would the estimated 2013 cost be in millions? A. 14.83333 B. 12.5 C. 15.5 D. 13.5", "files": ["image_659.png"]},
|
107 |
-
|
108 |
-
|
109 |
],
|
110 |
description="",
|
111 |
stop_btn="Stop Generation",
|
|
|
26 |
import json
|
27 |
|
28 |
root_path = os.path.dirname(os.path.abspath(__file__))
|
29 |
+
print(root_path)
|
30 |
os.environ['GRADIO_TEMP_DIR'] = root_path
|
31 |
|
32 |
parser = argparse.ArgumentParser()
|
|
|
52 |
|
53 |
@spaces.GPU
|
54 |
def bot_streaming(message, history):
|
55 |
+
print ("triggered")
|
56 |
print(message)
|
57 |
image_file = None
|
58 |
if message["files"]:
|
|
|
71 |
|
72 |
streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
|
73 |
def generate():
|
74 |
+
print('Running chat')
|
75 |
output = chat_llava(
|
76 |
args=args,
|
77 |
image_file=image_file,
|
|
|
93 |
buffer += new_text
|
94 |
generated_text_without_prompt = buffer
|
95 |
time.sleep(0.06)
|
96 |
+
print (generated_text_without_prompt)
|
97 |
yield generated_text_without_prompt
|
98 |
|
99 |
chatbot = gr.Chatbot(scale=1)
|
|
|
106 |
{"text": "What is in this picture?", "files": ["http://images.cocodataset.org/val2017/000000039769.jpg"]},
|
107 |
{"text": "What is the spending on Healthcare in July? A. 450 B. 600 C. 520 D. 510", "files": ["image_107.png"]},
|
108 |
{"text": "If 2012 net periodic opeb cost increased at the same pace as the pension cost, what would the estimated 2013 cost be in millions? A. 14.83333 B. 12.5 C. 15.5 D. 13.5", "files": ["image_659.png"]},
|
|
|
|
|
109 |
],
|
110 |
description="",
|
111 |
stop_btn="Stop Generation",
|