Spaces: Running on Zero
aixsatoshi committed
Commit a4a5e65
1 Parent(s): c8daca5
Update app.py
app.py CHANGED
@@ -51,7 +51,7 @@ llm = None
 llm_model = None
 
 @spaces.GPU(duration=120)
-async def respond(
+def respond(
     message,
     history: list[tuple[str, str]],
     model,
@@ -61,9 +61,9 @@ async def respond(
     top_p,
     top_k,
     repeat_penalty,
-
+    template,
 ):
-    chat_template = MessagesFormatterType
+    chat_template = MessagesFormatterType[template]
 
     global llm
     global llm_model
@@ -118,12 +118,9 @@ async def respond(
     )
 
     outputs = ""
-    try:
-        async for output in stream:
-            outputs += output
-            yield outputs
-    except StopAsyncIteration:
-        pass  # Handle end of iteration gracefully
+    for output in stream:
+        outputs += output
+        yield outputs
 
 description = """<p align="center">Defaults to Oumuamua-7b-RP (you can switch to other models from additional inputs)</p>
 <p><center>