wop committed on
Commit
5f03bac
·
verified ·
1 Parent(s): 813e436

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -33
app.py CHANGED
@@ -92,45 +92,61 @@ def search_web(query):
92
  except Exception as e:
93
  return f"An error occurred: {e}"
94
 
95
- full_response = None # Initialize full_response to None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
96
 
97
- if prompt := st.chat_input("Enter your prompt here..."):
98
  st.session_state.messages.append({"role": "user", "content": prompt})
99
 
100
  with st.chat_message("user", avatar="🕺"):
101
  st.markdown(prompt)
102
 
103
  try:
104
- if "search for" in prompt.lower():
105
- query = prompt.lower().replace("search for", "").strip()
106
- search_results = search_web(query)
107
- formatted_results = "\n\n".join([f"Title: {result['title']}\nURL: {result['url']}\nSnippet: {result['snippet']}" for result in search_results])
108
- st.session_state.messages.append({"role": "assistant", "content": formatted_results})
109
- else:
110
- chat_completion = client.chat.completions.create(
111
- model=model_option,
112
- messages=[
113
- {"role": m["role"], "content": m["content"]}
114
- for m in st.session_state.messages
115
- ],
116
- max_tokens=max_tokens,
117
- stream=True,
118
- )
119
-
120
- with st.chat_message("assistant", avatar="🤖"):
121
- chat_responses_generator = generate_chat_responses(chat_completion)
122
- full_response = st.write_stream(chat_responses_generator)
123
  except Exception as e:
124
  st.error(e, icon="🚨")
125
-
126
- # Check if full_response is defined before using it
127
- if full_response is not None:
128
- if isinstance(full_response, str):
129
- st.session_state.messages.append(
130
- {"role": "assistant", "content": full_response}
131
- )
132
- else:
133
- combined_response = "\n".join(str(item) for item in full_response)
134
- st.session_state.messages.append(
135
- {"role": "assistant", "content": combined_response}
136
- )
 
92
  except Exception as e:
93
  return f"An error occurred: {e}"
94
 
95
def run_conversation(user_prompt):
    """Send *user_prompt* to the model with a web-search tool attached and
    stream the assistant's reply back chunk by chunk.

    Args:
        user_prompt: The raw text the user typed.

    Yields:
        str: Successive content deltas from the streamed completion.
    """
    # Step 1: send the conversation and available functions to the model.
    messages = [
        {
            "role": "system",
            "content": "You are a function calling LLM that uses the data extracted from the get_game_score function to answer questions around NBA game scores. Include the team and their opponent in your response."
        },
        {
            "role": "user",
            "content": user_prompt,
        }
    ]
    # Tool schema fix: OpenAI-compatible APIs only accept "type": "function"
    # with a nested "function" object ("type": "internet" is not a valid
    # tool type and is rejected by the endpoint).
    tools = [
        {
            "type": "function",
            "function": {
                "name": "search_web",
                "description": "Search the web for information.",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "The search query.",
                        }
                    },
                    "required": ["query"],
                },
            },
        }
    ]
    # stream=True is required: the loop below reads chunk.choices[0].delta,
    # which only exists on streaming chunks — a non-streaming response
    # exposes choices[0].message instead and would raise AttributeError.
    response = client.chat.completions.create(
        model=model_option,
        messages=messages,
        tools=tools,
        tool_choice="auto",
        max_tokens=max_tokens,
        stream=True,
    )

    for chunk in response:
        if chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content
137
 
138
# Chat loop: read the user's prompt, echo it, stream the assistant's reply,
# and persist both turns in session state.
# st.chat_input (not st.text_input) keeps the widget pinned to the bottom of
# the chat UI and clears itself after submit.
if prompt := st.chat_input("Enter your prompt here..."):
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user", avatar="🕺"):
        st.markdown(prompt)

    try:
        chat_completion = run_conversation(prompt)

        with st.chat_message("assistant", avatar="🤖"):
            chat_responses_generator = generate_chat_responses(chat_completion)
            # st.write_stream renders the chunks as ONE growing message and
            # returns the concatenated text; calling st.markdown() per chunk
            # would emit a separate element for every fragment.
            full_response = st.write_stream(chat_responses_generator)

        # Persist the assistant turn so the next prompt keeps full context.
        if isinstance(full_response, str):
            st.session_state.messages.append(
                {"role": "assistant", "content": full_response}
            )
        else:
            # write_stream can return a list of parts; join them into one
            # string before storing.
            combined_response = "\n".join(str(item) for item in full_response)
            st.session_state.messages.append(
                {"role": "assistant", "content": combined_response}
            )
    except Exception as e:
        st.error(e, icon="🚨")