awacke1 committed on
Commit 6eae6cf · 1 Parent(s): a7e81ad

Update app.py

Files changed (1)
app.py +4 -7
app.py CHANGED
@@ -79,41 +79,38 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     conversation.append({'role': 'user', 'content': prompt})
     if len(document_section)>0:
         conversation.append({'role': 'assistant', 'content': document_section})
-
     start_time = time.time()
     report = []
     res_box = st.empty()
     collected_chunks = []
     collected_messages = []
-
     key = os.getenv('OPENAI_API_KEY')
     openai.api_key = key
+
     for chunk in openai.ChatCompletion.create(
             model='gpt-3.5-turbo',
             messages=conversation,
             temperature=0.5,
             stream=True
         ):
-
         collected_chunks.append(chunk) # save the event response
         chunk_message = chunk['choices'][0]['delta'] # extract the message
         collected_messages.append(chunk_message) # save the message
-
         content=chunk["choices"][0].get("delta",{}).get("content")
-
         try:
             report.append(content)
             if len(content) > 0:
                 result = "".join(report).strip()
-                #result = result.replace("\n", "")
                 res_box.markdown(f'*{result}*')
         except:
             st.write(' ')
-
+
     full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
     st.write("Elapsed time:")
     st.write(time.time() - start_time)
     readitaloud(full_reply_content)
+    filename = generate_filename(full_reply_content, choice)
+    create_file(filename, prompt, full_reply_content, should_save)
     return full_reply_content
 
 def chat_with_file_contents(prompt, file_content, model_choice='gpt-3.5-turbo'):
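For context, the hunk above streams the completion with the pre-1.0 openai Python client: each chunk carries a partial 'delta' that is accumulated and re-rendered into a single st.empty() placeholder so the reply appears token by token. A minimal, self-contained sketch of that pattern is below; it assumes openai<1.0 and an OPENAI_API_KEY environment variable, and the stream_chat name is illustrative rather than part of app.py.

import os

import openai
import streamlit as st

openai.api_key = os.getenv('OPENAI_API_KEY')

def stream_chat(prompt, model='gpt-3.5-turbo'):
    res_box = st.empty()   # placeholder that is overwritten on every update
    collected = []         # accumulated delta strings
    for chunk in openai.ChatCompletion.create(
            model=model,
            messages=[{'role': 'user', 'content': prompt}],
            temperature=0.5,
            stream=True):
        # Each streamed chunk holds an incremental delta; the final chunk
        # has no 'content' key, so default to an empty string.
        delta = chunk['choices'][0].get('delta', {}).get('content', '')
        if delta:
            collected.append(delta)
            res_box.markdown(''.join(collected))
    return ''.join(collected)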
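The two added lines persist the reply via generate_filename and create_file, which are defined elsewhere in app.py and do not appear in this diff; likewise, choice and should_save are presumably module-level values that this hunk only references. Purely as an assumption about their shape, such helpers could look like this hypothetical sketch:

import re
from datetime import datetime

def generate_filename(text, choice):
    # Hypothetical: treat 'choice' as a file extension and build a
    # timestamped, filesystem-safe name from the start of the reply.
    stem = re.sub(r'\W+', '_', text[:40]).strip('_') or 'reply'
    return f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_{stem}.{choice}"

def create_file(filename, prompt, response, should_save):
    # Hypothetical: write the prompt/response pair only when saving is enabled.
    if not should_save:
        return
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(f"Prompt:\n{prompt}\n\nResponse:\n{response}\n")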