Update app.py
app.py
CHANGED
@@ -134,17 +134,12 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
 
     report = []
     res_box = st.empty()
-
-            model='gpt-3.5-turbo',
-            messages=conversation,
-            temperature=0.5,
-            stream=True
-    )
+
     collected_chunks = []
     collected_messages = []
 
 
-    for chunk in response:
+    #for chunk in response:
         #collected_chunks.append(chunk)  # save the event response
         #chunk_message = chunk['choices'][0]['delta']  # extract the message
         #collected_messages.append(chunk_message)  # save the message
@@ -153,7 +148,13 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
         # into a single string,
         # then strip out any empty strings
 
-
+    for resp in openai.ChatCompletion.create(
+            model='gpt-3.5-turbo',
+            messages=conversation,
+            temperature=0.5,
+            stream=True
+    ):
+        report.append(resp.choices[0].text)
     result = "".join(report).strip()
     result = result.replace("\n", "")
     res_box.markdown(f'*{result}*')
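A note on the streamed chunk shape, for anyone reading this change: with the pre-1.0 openai-python API, streamed ChatCompletion chunks expose choices[0].delta (with an optional "content" field) rather than a text attribute, so the appended value is usually read from the delta. Below is a minimal sketch of that accumulation pattern, not the committed code; it assumes openai 0.x, an API key configured elsewhere, and a placeholder conversation list standing in for the app's real one, with the same report / res_box names used in the diff.

import openai
import streamlit as st

# Assumes openai.api_key is configured elsewhere (e.g. via the OPENAI_API_KEY env var).
# Placeholder conversation; the app builds its own list of role/content messages.
conversation = [{"role": "user", "content": "Say hello."}]

report = []
res_box = st.empty()
for chunk in openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=conversation,
        temperature=0.5,
        stream=True,
):
    # Streaming ChatCompletion chunks carry a `delta` dict, not `text`;
    # the first and last chunks may omit "content", hence the .get() default.
    delta = chunk["choices"][0]["delta"]
    report.append(delta.get("content", ""))
    # Re-render the accumulated reply in the Streamlit placeholder on every chunk.
    res_box.markdown(f"*{''.join(report).strip()}*")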