Tuana committed
Commit b797bbf
1 Parent(s): a576a00
Files changed (3)
  1. app.py +6 -3
  2. requirements.txt +1 -1
  3. utils/haystack.py +2 -4
app.py CHANGED
@@ -11,12 +11,15 @@ from utils.ui import reset_results, set_initial_state
 
 set_initial_state()
 
-st.write("# What have they been tweeting about lately?")
+st.write("# 🐤 What have they been tweeting about lately?")
 
+search_bar, button = st.columns(2)
 # Search bar
-username = st.text_input("", value=st.session_state.username, max_chars=100, on_change=reset_results)
+with search_bar:
+    username = st.text_input("", value=st.session_state.username, on_change=reset_results, label_visibility="collapsed")
 
-run_pressed = st.button("Run")
+with button:
+    run_pressed = st.button("Seach tweets")
 
 run_query = (
     run_pressed or username != st.session_state.username
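The layout change above uses st.columns to place the search box and the button side by side. Below is a minimal, standalone sketch of that pattern, assuming only the Streamlit 1.18 pinned in requirements.txt; the session-state default and the reset_results callback from app.py are replaced with plain placeholders here, so this is an illustration rather than the app itself.

import streamlit as st

# Two equal-width columns: the text input goes in the left one, the button in the right.
search_bar, button = st.columns(2)

with search_bar:
    # label_visibility="collapsed" hides the label entirely instead of leaving a blank
    # gap above the input, so it lines up flush with the button in the other column.
    username = st.text_input("Twitter username", value="", label_visibility="collapsed")

with button:
    run_pressed = st.button("Search tweets")

if run_pressed and username:
    st.write(f"Searching tweets for @{username} ...")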
requirements.txt CHANGED
@@ -1,5 +1,5 @@
 farm-haystack==1.13.0
-streamlit==1.10.0
+streamlit==1.18.0
 markdown
 st-annotated-text
 python-dotenv
utils/haystack.py CHANGED
@@ -5,9 +5,7 @@ from utils.config import TWITTER_BEARER, OEPN_AI_KEY
 from haystack.nodes import PromptNode, PromptTemplate
 
 # cached to make index and models load only at start
-@st.cache(
-    hash_funcs={"builtins.SwigPyObject": lambda _: None}, allow_output_mutation=True
-)
+@st.cache_resource
 def start_haystack():
     #Use this function to contruct a pipeline
     prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=OEPN_AI_KEY)
@@ -40,7 +38,7 @@ def start_haystack():
 
 prompter, template = start_haystack()
 
-@st.cache(allow_output_mutation=True)
+@st.cache_data(show_spinner=False)
 def query(username):
 
     bearer_token = TWITTER_BEARER
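The caching change above goes with the Streamlit 1.18 bump in requirements.txt: the older st.cache decorator is replaced by st.cache_resource for the pipeline object and st.cache_data for query results. Below is a minimal sketch of how the two decorators divide that work, assuming nothing beyond Streamlit 1.18 itself; load_model and fetch_tweets are hypothetical stand-ins, not functions from this repository.

import streamlit as st

@st.cache_resource
def load_model():
    # cache_resource is for global, often unserializable objects (models, pipelines,
    # connections): the object is built once per process and reused across reruns.
    return {"model_name": "text-davinci-003"}  # placeholder for a real PromptNode/pipeline

@st.cache_data(show_spinner=False)
def fetch_tweets(username: str):
    # cache_data is for serializable results keyed on the arguments: repeated calls
    # with the same username return a cached copy instead of re-running the function.
    return [f"tweet {i} from @{username}" for i in range(3)]

model = load_model()
tweets = fetch_tweets("some_username")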