Update app.py
app.py
CHANGED
@@ -127,7 +127,17 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     conversation.append({'role': 'user', 'content': prompt})
     if len(document_section)>0:
         conversation.append({'role': 'assistant', 'content': document_section})
-    response = openai.ChatCompletion.create(model=model, messages=conversation)
+    #response = openai.ChatCompletion.create(model=model, messages=conversation)
+
+    # streaming response
+    result_textarea = st.empty()
+    results=[]
+    for responses in openai.ChatCompletion.create(model=model, messages=conversation, stream=True):
+        results.append(responses['choices'][0]['delta'].get('content', ''))
+    result = "".join(results).strip()
+    result = result.replace('\n','')
+    result_textarea.markdown(f'*{result}*')
+
     #return response
     return response['choices'][0]['message']['content']
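For reference, below is a minimal, self-contained sketch of the streaming pattern this commit introduces, assuming the pre-1.0 openai Python package (openai.ChatCompletion) and Streamlit. With stream=True, each chunk's choice carries a delta dict rather than a full message, so the text must be accumulated piece by piece, and rewriting the st.empty() placeholder inside the loop is what makes the reply appear incrementally. The helper name stream_chat_completion is illustrative and not part of app.py; it returns the accumulated text, since the old non-streaming response object is no longer created after this change.

import openai
import streamlit as st

def stream_chat_completion(conversation, model='gpt-3.5-turbo'):
    # Placeholder that is rewritten on every chunk so the reply renders incrementally.
    result_textarea = st.empty()
    results = []
    # With stream=True each chunk's choice carries a 'delta' dict, not a full 'message'.
    for chunk in openai.ChatCompletion.create(model=model, messages=conversation, stream=True):
        results.append(chunk['choices'][0]['delta'].get('content', ''))
        result_textarea.markdown("".join(results))
    # Return the streamed text; the old 'response' object no longer exists here.
    return "".join(results).strip()

A caller would then use the returned string directly, e.g. answer = stream_chat_completion(conversation), and append it to the conversation history instead of reading response['choices'][0]['message']['content'].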