app.py
CHANGED
@@ -136,13 +136,14 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, How can I help you today?"
     try:
         # Use the format_promt function to prepare the request
         formatted_request = format_promt(prompt, custom_instructions, temp_value)
-
-        output =
+
+        output = text_generation(
             formatted_request,
             temperature=temp_value,#0.5
-
+            max_new_tokens=1000,
             stream=True
         )
+
         # Create a placeholder for the streaming response
         message_placeholder = st.empty()
         full_response = ""
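For context, a minimal sketch of the pattern this hunk moves toward: calling text_generation with stream=True and a max_new_tokens cap, then writing tokens into a Streamlit placeholder as they arrive. This is an assumption-laden illustration, not the Space's actual app.py: it assumes the bare text_generation call in the diff is bound to a huggingface_hub InferenceClient, and the model name, custom_instructions value, and format_promt body are placeholders.

import streamlit as st
from huggingface_hub import InferenceClient

# Assumed client setup; the real app selects the model dynamically (selected_model).
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")

def format_promt(prompt, custom_instructions, temp_value):
    # Stand-in for the Space's own prompt formatter (signature taken from the diff).
    return f"{custom_instructions}\n\nUser: {prompt}"

custom_instructions = ""  # placeholder; the app builds this from user settings
temp_value = 0.5

if prompt := st.chat_input("How can I help you today?"):
    try:
        formatted_request = format_promt(prompt, custom_instructions, temp_value)

        # stream=True returns an iterator of tokens instead of one blocking string.
        output = client.text_generation(
            formatted_request,
            temperature=temp_value,
            max_new_tokens=1000,
            stream=True,
        )

        # Placeholder that is overwritten as each token arrives, giving the
        # incremental "typing" effect in the chat UI.
        message_placeholder = st.empty()
        full_response = ""
        for token in output:
            full_response += token
            message_placeholder.markdown(full_response)
    except Exception as e:
        st.error(f"Generation failed: {e}")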