Spaces: Running on Zero
v1
app.py CHANGED
@@ -27,7 +27,7 @@ freeze_model(meteor)
 # previous length
 previous_length = 0
 
-
+@spaces.GPU
 def threading_function(inputs, image_token_number, streamer):
     # Meteor Mamba
     mmamba_inputs = mmamba.eval_process(inputs=inputs, tokenizer=tok_meteor, device=device, img_token_number=image_token_number)
@@ -58,7 +58,7 @@ def add_message(history, message):
     history.append((message["text"], None))
     return history, gr.MultimodalTextbox(value=None, interactive=False)
 
-@spaces.GPU
+@spaces.GPU
 def bot_streaming(message, history):
 
     # prompt type -> input prompt
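
On a ZeroGPU Space, hardware is only attached while a function decorated with @spaces.GPU is executing, which is why this commit marks both the thread target that runs the model and the bot_streaming handler. Below is a minimal sketch of that decorator-plus-threaded-streaming pattern, assuming a generic Transformers setup: the placeholder model, the generate_worker helper, and the TextIteratorStreamer wiring are illustrative assumptions, not code from this Space's app.py.

# Sketch of the @spaces.GPU + threaded streaming pattern assumed by the diff above.
# Only the placement of the decorators mirrors the commit; everything else is illustrative.
import threading

import gradio as gr
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

device = "cuda" if torch.cuda.is_available() else "cpu"
tok = AutoTokenizer.from_pretrained("gpt2")                       # placeholder model
model = AutoModelForCausalLM.from_pretrained("gpt2").to(device)


@spaces.GPU  # GPU is attached only while this function runs (ZeroGPU)
def generate_worker(prompt, streamer):
    # Tokenize on the allocated device and stream tokens back to the caller.
    inputs = tok(prompt, return_tensors="pt").to(device)
    model.generate(**inputs, streamer=streamer, max_new_tokens=128)


@spaces.GPU
def bot_streaming(message, history):
    # Run generation in a worker thread and yield partial text as it arrives.
    streamer = TextIteratorStreamer(tok, skip_prompt=True, skip_special_tokens=True)
    thread = threading.Thread(target=generate_worker, args=(message["text"], streamer))
    thread.start()
    text = ""
    for token in streamer:
        text += token
        yield text
    thread.join()


demo = gr.ChatInterface(bot_streaming, multimodal=True)
if __name__ == "__main__":
    demo.launch()

The point of the change, as far as the diff shows, is that the decorator sits on the function that actually touches the model, so the GPU request covers the generation work done in the thread rather than only the Gradio callback.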