added sample questions
- app.py: +36 -5
- requirements.txt: +2 -1
app.py
CHANGED
@@ -1,13 +1,16 @@
 
-from omegaconf import OmegaConf
-import streamlit as st
 import os
 from PIL import Image
 import sys
+
+from omegaconf import OmegaConf
 import requests
 from typing import Tuple
 from bs4 import BeautifulSoup
 
+import streamlit as st
+from streamlit_pills import pills
+
 from dotenv import load_dotenv
 load_dotenv(override=True)
 
@@ -149,6 +152,7 @@ def initialize_agent(_cfg):
     - Never discuss politics, and always respond politely.
     - This is important: when you include links to Hacker News stories, use the actual title of the story as the link's displayed text.
       Don't use text like "Source" which doesn't tell the user what the link is about.
+    - Don't include external links in your responses unless the user asks for them.
     - Give slight preference to newer stories when answering questions.
     """
 
@@ -169,12 +173,22 @@ def initialize_agent(_cfg):
 def toggle_logs():
     st.session_state.show_logs = not st.session_state.show_logs
 
+def show_example_questions():
+    if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
+        selected_example = pills("Queries to Try:", st.session_state.example_messages, index=None)
+        if selected_example:
+            st.session_state.ex_prompt = selected_example
+            st.session_state.first_turn = False
+            return True
+    return False
+
 def launch_bot():
     def reset():
         st.session_state.messages = [{"role": "assistant", "content": initial_prompt, "avatar": "π¦"}]
         st.session_state.thinking_message = "Agent at work..."
         st.session_state.log_messages = []
         st.session_state.prompt = None
+        st.session_state.first_turn = True
         st.session_state.show_logs = False
 
     st.set_page_config(page_title="Hacker News Bot", layout="wide")
@@ -183,8 +197,12 @@ def launch_bot():
            'customer_id': str(os.environ['VECTARA_CUSTOMER_ID']),
            'corpus_id': str(os.environ['VECTARA_CORPUS_ID']),
            'api_key': str(os.environ['VECTARA_API_KEY']),
+           'examples': os.environ.get('QUERY_EXAMPLES', None)
         })
         st.session_state.cfg = cfg
+        st.session_state.ex_prompt = None
+        example_messages = [example.strip() for example in cfg.examples.split(",")] if cfg.examples else []
+        st.session_state.example_messages = [em for em in example_messages if len(em)>0]
         reset()
 
     cfg = st.session_state.cfg
@@ -219,8 +237,18 @@ def launch_bot():
         with st.chat_message(message["role"], avatar=message["avatar"]):
             st.write(message["content"])
 
+    example_container = st.empty()
+    with example_container:
+        if show_example_questions():
+            example_container.empty()
+            st.rerun()
+
     # User-provided prompt
-    if prompt := st.chat_input():
+    if st.session_state.ex_prompt:
+        prompt = st.session_state.ex_prompt
+    else:
+        prompt = st.chat_input()
+    if prompt:
         st.session_state.messages.append({"role": "user", "content": prompt, "avatar": '🧑‍💻'})
         st.session_state.prompt = prompt  # Save the prompt in session state
         st.session_state.log_messages = []
@@ -228,7 +256,8 @@ def launch_bot():
         with st.chat_message("user", avatar='🧑‍💻'):
             print(f"Starting new question: {prompt}\n")
             st.write(prompt)
-
+        st.session_state.ex_prompt = None
+
     # Generate a new response if last message is not from assistant
     if st.session_state.prompt:
         with st.chat_message("assistant", avatar='🤖'):
@@ -238,7 +267,9 @@ def launch_bot():
             message = {"role": "assistant", "content": res, "avatar": '🤖'}
             st.session_state.messages.append(message)
             st.markdown(res)
-
+            st.session_state.ex_prompt = None
+            st.session_state.prompt = None
+            st.rerun()
 
     log_placeholder = st.empty()
     with log_placeholder.container():
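For context, a minimal, self-contained sketch of the example-question flow this commit adds to app.py: read a comma-separated QUERY_EXAMPLES environment variable, show the examples as clickable pills on the first turn, and feed a clicked pill through the same path as a typed question. Only the QUERY_EXAMPLES name, the pills(...) call, and the show_example_questions() logic come from the diff above; the echo_agent stand-in and the session-state bootstrap are illustrative assumptions, not the Space's actual agent code.

# Minimal sketch (not the Space's real agent): demonstrates the pills-based
# example-question pattern added in this commit.
import os

import streamlit as st
from streamlit_pills import pills


def echo_agent(question: str) -> str:
    # Stand-in for the real Vectara agent call used by the Space.
    return f"You asked: {question}"


def show_example_questions() -> bool:
    # Show clickable example queries only on the first turn, as in the diff.
    if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
        selected = pills("Queries to Try:", st.session_state.example_messages, index=None)
        if selected:
            st.session_state.ex_prompt = selected
            st.session_state.first_turn = False
            return True
    return False


if "example_messages" not in st.session_state:
    # QUERY_EXAMPLES is the same comma-separated env var the diff introduces.
    examples = os.environ.get("QUERY_EXAMPLES", "")
    st.session_state.example_messages = [e.strip() for e in examples.split(",") if e.strip()]
    st.session_state.first_turn = True
    st.session_state.ex_prompt = None
    st.session_state.messages = []

# Replay the conversation so far.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.write(msg["content"])

# Render the pills; if one was clicked, rerun so it is handled as a prompt.
example_container = st.empty()
with example_container:
    if show_example_questions():
        example_container.empty()
        st.rerun()

# A clicked example takes priority over the chat box.
prompt = st.session_state.ex_prompt or st.chat_input()
if prompt:
    st.session_state.ex_prompt = None
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)
    answer = echo_agent(prompt)
    st.session_state.messages.append({"role": "assistant", "content": answer})
    with st.chat_message("assistant"):
        st.write(answer)

Clearing ex_prompt once the question has been handled, as the diff also does, keeps a clicked example from being re-submitted on the next rerun.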
requirements.txt
CHANGED
@@ -1,5 +1,6 @@
 omegaconf==2.3.0
-streamlit==1.32.2
 pydantic==1.10.15
 python-dotenv==1.0.1
+streamlit==1.32.2
+streamlit_pills==0.3.0
 git+https://{GITHUB_TOKEN}@github.com/vectara/vectara-agent.git
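The example questions themselves are not part of this diff; they come from a QUERY_EXAMPLES environment variable (a Space secret/variable, or a .env entry, since app.py calls load_dotenv). The snippet below uses a made-up value to show how the comma-separated string becomes the pill labels; the parsing lines mirror the diff, the value is purely illustrative.

# Illustrative only: the QUERY_EXAMPLES value below is made up;
# the parsing mirrors the two list comprehensions added in app.py.
import os

os.environ["QUERY_EXAMPLES"] = (
    "What are people saying about Rust?,"
    " Summarize today's top AI stories,"
    " Which startups were discussed this week?"
)

examples = os.environ.get("QUERY_EXAMPLES", None)
example_messages = [example.strip() for example in examples.split(",")] if examples else []
example_messages = [em for em in example_messages if len(em) > 0]
print(example_messages)
# ['What are people saying about Rust?',
#  "Summarize today's top AI stories",
#  'Which startups were discussed this week?']

Because the string is split on commas, a comma inside an example question would break it into two pills, so example strings should avoid embedded commas.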