max_new_tokens max_tokens
app.py
CHANGED
@@ -140,7 +140,7 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, How can I help you today?"
     output = client.chat.completions.create(
         formatted_request,
         temperature=temp_value,#0.5
-        max_new_tokens=3000,
+        max_tokens=3000,
         stream=True
     )
     # Create a placeholder for the streaming response
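For reference, a minimal sketch of how a streamed completion with the renamed max_tokens cap might be consumed in a Streamlit app like this one. The client setup, model name, and messages below are illustrative assumptions, not the Space's actual code; only temperature, max_tokens=3000, stream=True, and the streaming-placeholder pattern come from the diff.

# Minimal sketch, assuming the OpenAI Python client (>=1.x) and a Streamlit chat UI.
# Placeholders: the model name and message content are not the Space's values.
import streamlit as st
from openai import OpenAI

client = OpenAI()  # assumption: app.py configures its own client/endpoint elsewhere

output = client.chat.completions.create(
    model="some-model",                               # placeholder model name
    messages=[{"role": "user", "content": "Hello"}],  # placeholder request
    temperature=0.5,
    max_tokens=3000,  # cap on generated tokens, as added in this commit
    stream=True,
)

# Append streamed chunks to a single placeholder, mirroring the
# "placeholder for the streaming response" pattern in app.py
placeholder = st.empty()
full_response = ""
for chunk in output:
    full_response += chunk.choices[0].delta.content or ""
    placeholder.markdown(full_response)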