Update app.py
app.py CHANGED
@@ -39,7 +39,6 @@ llm = Llama.from_pretrained(
     n_ctx=2048, # n_ctx should be increased to accommodate the image embedding
 )
 
-
 @spaces.GPU(queue=False)
 def stream_chat(message, history: list, temperature: float, max_new_tokens: int):
     print(f'message is - {message}')
@@ -92,7 +91,6 @@ def stream_chat(message, history: list, temperature: float, max_new_tokens: int)
         messages = messages,
         temperature=temperature,
         max_tokens=max_new_tokens,
-        stream=True
     )
 
     return response["choices"][0]["text"]
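
In effect, the commit removes a blank line after the model setup and drops stream=True from the completion call, so stream_chat now makes a single blocking call and returns the full completion text rather than yielding partial chunks. Below is a minimal, hedged sketch of that non-streaming path using llama-cpp-python's create_chat_completion. The repo_id, filename, and history handling are placeholders rather than the Space's actual values, the @spaces.GPU decorator is left out, and the final index follows the chat-completion return shape (["message"]["content"]) instead of the completion-API key ["text"] used in the commit.

# Minimal sketch (not the Space's exact code) of the non-streaming call path
# this commit switches to, assuming llama-cpp-python's Llama.from_pretrained
# and create_chat_completion; repo_id and filename are placeholders.
from llama_cpp import Llama

llm = Llama.from_pretrained(
    repo_id="Qwen/Qwen2-0.5B-Instruct-GGUF",  # placeholder model repo
    filename="*q8_0.gguf",                    # placeholder GGUF file pattern
    n_ctx=2048, # n_ctx should be increased to accommodate the image embedding
)

def stream_chat(message, history: list, temperature: float, max_new_tokens: int):
    print(f'message is - {message}')
    # Rebuild the conversation, assuming Gradio's pair-style history.
    messages = []
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # With stream=True removed, this is a single blocking call that returns
    # the whole completion object at once instead of an iterator of chunks.
    response = llm.create_chat_completion(
        messages=messages,
        temperature=temperature,
        max_tokens=max_new_tokens,
    )
    # Non-streaming chat completions expose the text under
    # ["message"]["content"]; ["text"] (as in the commit) is the key
    # returned by the plain-completion API, llm(prompt, ...).
    return response["choices"][0]["message"]["content"]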