mtyrrell committed on
Commit
63a633d
·
1 Parent(s): 9d81a2d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -26
app.py CHANGED
@@ -3,27 +3,27 @@ import os
3
  import re
4
  import json
5
  from dotenv import load_dotenv
6
- from haystack.nodes.prompt import PromptNode, PromptTemplate
7
- from haystack.nodes import EmbeddingRetriever
8
- from haystack import Pipeline
9
  import numpy as np
10
  import pandas as pd
11
- from haystack.document_stores import FAISSDocumentStore, PineconeDocumentStore
12
- from haystack.nodes import EmbeddingRetriever
13
  from haystack.schema import Document
14
- from huggingface_hub import login, HfApi, hf_hub_download, InferenceClient
 
 
15
  import openai
16
 
17
- # Get HF token
18
- hf_token = os.environ["HF_TOKEN"]
19
- login(token=hf_token, add_to_git_credential=True)
 
 
20
 
21
  # Get openai API key
22
- openai_key = os.environ["OPENAI_API_KEY"]
23
  openai.api_key = os.environ["OPENAI_API_KEY"]
 
24
 
25
  # Get pinecone API key
26
  pinecone_key = os.environ["PINECONE_API_KEY"]
 
27
 
28
  #___________________________________________________________________________________________________________
29
 
@@ -159,6 +159,7 @@ def run_query(input_text, country, model_sel):
159
  res_box.success(result) # output to response text box
160
 
161
  references = get_refs(docs, result) # extract references from the generated text
 
162
  # else:
163
  # res = client.text_generation(get_prompt(docs, query=input_query), max_new_tokens=4000, temperature=0.01, model=model)
164
  # output = res
@@ -172,8 +173,8 @@ def run_query(input_text, country, model_sel):
172
 
173
  #___________________________________________________________________________________________________________
174
 
 
175
  with st.sidebar:
176
- # Dropdown selectbox
177
  country = st.sidebar.multiselect('Filter by country:', country_options)
178
  vulnerabilities_cat = st.sidebar.multiselect('Filter by vulnerabilities category:', vulnerability_options)
179
  with st.expander("ℹ️ - About filters", expanded=False):
@@ -185,10 +186,12 @@ with st.sidebar:
185
  """
186
  )
187
 
 
188
  with st.container():
189
  st.markdown("<h2 style='text-align: center;'> Climate Policy Documents: Vulnerabilities Analysis Q&A </h2>", unsafe_allow_html=True)
190
  st.write(' ')
191
 
 
192
  with st.expander("ℹ️ - About this app", expanded=False):
193
  st.write(
194
  """
@@ -206,31 +209,23 @@ with st.expander("ℹ️ - About this app", expanded=False):
206
 
207
  Make sure your filters match the countries you have specified for the analysis!
208
  """)
209
- # st.write(country)
210
- # st.write(vulnerabilities_cat)
211
 
212
 
213
- # Dropdown selectbox: model
214
  # model_sel = st.selectbox('Select an LLM:', model_options)
215
  model_sel = "chatGPT"
216
 
217
  #----Model Select logic-------
218
  if model_sel == "chatGPT":
219
  model_name = "gpt-3.5-turbo"
220
- # # Initialize the PromptNode
221
- # pn = PromptNode(model_name_or_path=model_name, default_prompt_template=template, api_key=openai_key, max_length=2000, model_kwargs={"generation_kwargs": {"do_sample": False, "temperature": 0}})
222
-
223
- # # Initialize the pipeline
224
- # pipe = Pipeline()
225
- # pipe.add_node(component=pn, name="prompt_node", inputs=["Query"])
226
- else:
227
- # Currently disabled
228
- model = "meta-llama/Llama-2-70b-chat-hf"
229
- # Instantiate the inference client
230
- client = InferenceClient()
231
 
 
 
 
 
232
 
233
- if selected_example == "-":
 
234
  text = st.text_area('Enter your question in the text box below using natural language or select an example from above:')
235
  else:
236
  text = st.text_area('Enter your question in the text box below using natural language or select an example from above:', value=selected_example)
@@ -242,3 +237,7 @@ if st.button('Submit'):
242
  run_query(text, country=country, model_sel=model_sel)
243
 
244
 
 
 
 
 
 
3
  import re
4
  import json
5
  from dotenv import load_dotenv
 
 
 
6
  import numpy as np
7
  import pandas as pd
 
 
8
  from haystack.schema import Document
9
+ from haystack.document_stores import PineconeDocumentStore
10
+ from haystack.nodes import EmbeddingRetriever
11
+ # from huggingface_hub import login, HfApi, hf_hub_download, InferenceClient
12
  import openai
13
 
14
+ # for local st testing, may need to run source ~/.zshrc to point to env vars
15
+
16
+ # Get HF token (used for llama2)
17
+ # hf_token = os.environ["HF_TOKEN"]
18
+ # login(token=hf_token, add_to_git_credential=True)
19
 
20
  # Get openai API key
 
21
  openai.api_key = os.environ["OPENAI_API_KEY"]
22
+ # openai.api_key = "sk-REDACTED"  # SECURITY: a real API key was committed here — redact and rotate it immediately
23
 
24
  # Get pinecone API key
25
  pinecone_key = os.environ["PINECONE_API_KEY"]
26
+ # pinecone_key = "REDACTED"  # SECURITY: a real API key was committed here — redact and rotate it immediately
27
 
28
  #___________________________________________________________________________________________________________
29
 
 
159
  res_box.success(result) # output to response text box
160
 
161
  references = get_refs(docs, result) # extract references from the generated text
162
+ # Llama2 selection (was running on HF)
163
  # else:
164
  # res = client.text_generation(get_prompt(docs, query=input_query), max_new_tokens=4000, temperature=0.01, model=model)
165
  # output = res
 
173
 
174
  #___________________________________________________________________________________________________________
175
 
176
+ # Sidebar (filters)
177
  with st.sidebar:
 
178
  country = st.sidebar.multiselect('Filter by country:', country_options)
179
  vulnerabilities_cat = st.sidebar.multiselect('Filter by vulnerabilities category:', vulnerability_options)
180
  with st.expander("ℹ️ - About filters", expanded=False):
 
186
  """
187
  )
188
 
189
+ # Main window title
190
  with st.container():
191
  st.markdown("<h2 style='text-align: center;'> Climate Policy Documents: Vulnerabilities Analysis Q&A </h2>", unsafe_allow_html=True)
192
  st.write(' ')
193
 
194
+ # Main window instructions
195
  with st.expander("ℹ️ - About this app", expanded=False):
196
  st.write(
197
  """
 
209
 
210
  Make sure your filters match the countries you have specified for the analysis!
211
  """)
 
 
212
 
213
 
214
+ # Dropdown selectbox: model (currently not used)
215
  # model_sel = st.selectbox('Select an LLM:', model_options)
216
  model_sel = "chatGPT"
217
 
218
  #----Model Select logic-------
219
  if model_sel == "chatGPT":
220
  model_name = "gpt-3.5-turbo"
 
 
 
 
 
 
 
 
 
 
 
221
 
222
+ # else:
223
+ # model = "meta-llama/Llama-2-70b-chat-hf"
224
+ # # Instantiate the inference client
225
+ # client = InferenceClient()
226
 
227
+ # get prompt from user or example prompt
228
+ if selected_example == "-": #hyphen used as a work around (st won't allow null selection)
229
  text = st.text_area('Enter your question in the text box below using natural language or select an example from above:')
230
  else:
231
  text = st.text_area('Enter your question in the text box below using natural language or select an example from above:', value=selected_example)
 
237
  run_query(text, country=country, model_sel=model_sel)
238
 
239
 
240
+
241
+
242
+
243
+