gabrielaltay committed on
Commit
729aada
1 Parent(s): da0f003

new layout

Browse files
Files changed (1) hide show
  1. app.py +32 -30
app.py CHANGED
@@ -120,10 +120,12 @@ def render_outreach_links():
120
  nomic_url = f"{nomic_base_url}/{nomic_map_name}/map"
121
  hf_url = "https://huggingface.co/hyperdemocracy"
122
  pc_url = "https://www.pinecone.io/blog/serverless"
 
123
  st.subheader(":brain: About [hyperdemocracy](https://hyperdemocracy.us)")
124
  st.subheader(f":world_map: Visualize [nomic atlas]({nomic_url})")
125
  st.subheader(f":hugging_face: Raw [huggingface datasets]({hf_url})")
126
  st.subheader(f":evergreen_tree: Index [pinecone serverless]({pc_url})")
 
127
 
128
 
129
  def group_docs(docs) -> list[tuple[str, list[Document]]]:
@@ -247,7 +249,7 @@ When you send a query to LegisQA, it will attempt to retrieve relevant content f
247
  This is a research project. The RAG technique helps to ground the LLM response by providing context from a trusted source, but it does not guarantee a high-quality response. We encourage you to play around: find questions that work and questions that fail. There is a small monthly budget dedicated to the OpenAI endpoints. Once that is used up each month, queries will no longer work.
248
 
249
 
250
- ## Sidebar Config
251
 
252
  Use the `Generative Config` to change LLM parameters.
253
  Use the `Retrieval Config` to change the number of chunks retrieved from our congress corpus and to apply various filters to the content before it is retrieved (e.g. filter to a specific set of congresses). Use the `Prompt Config` to try out different document formatting and prompting strategies.
@@ -443,17 +445,6 @@ def render_sidebar():
443
 
444
  def render_query_rag_tab():
445
 
446
- key_prefix = "query_rag"
447
- render_example_queries()
448
-
449
- col1, col2 = st.columns(2)
450
- with col1:
451
- with st.expander("Generative Config"):
452
- render_generative_config(key_prefix)
453
- with col2:
454
- with st.expander("Retrieval Config"):
455
- render_retrieval_config(key_prefix)
456
-
457
  QUERY_RAG_TEMPLATE = """You are an expert legislative analyst. Use the following excerpts from US congressional legislation to respond to the user's query. The excerpts are formatted as a JSON list. Each JSON object has "legis_id", "title", "introduced_date", "sponsor", and "snippets" keys. If a snippet is useful in writing part of your response, then cite the "legis_id", "title", "introduced_date", and "sponsor" in the response. If you don't know how to respond, just tell the user.
458
 
459
  ---
@@ -472,11 +463,22 @@ Query: {query}"""
472
  ]
473
  )
474
 
 
 
 
475
  with st.form(f"{key_prefix}|query_form"):
476
  st.text_area(
477
  "Enter a query that can be answered with congressional legislation:",
478
  key=f"{key_prefix}|query",
479
  )
 
 
 
 
 
 
 
 
480
  query_submitted = st.form_submit_button("Submit")
481
 
482
  if query_submitted:
@@ -567,23 +569,23 @@ Query: {query}"""
567
  )
568
  query_submitted = st.form_submit_button("Submit")
569
 
570
- grp1a, grp2a = st.columns(2)
571
 
572
- with grp1a:
573
- st.header("Group 1")
574
- key_prefix = f"{base_key_prefix}|grp1"
575
- with st.expander("Generative Config"):
576
- render_generative_config(key_prefix)
577
- with st.expander("Retrieval Config"):
578
- render_retrieval_config(key_prefix)
579
 
580
- with grp2a:
581
- st.header("Group 2")
582
- key_prefix = f"{base_key_prefix}|grp2"
583
- with st.expander("Generative Config"):
584
- render_generative_config(key_prefix)
585
- with st.expander("Retrieval Config"):
586
- render_retrieval_config(key_prefix)
587
 
588
  grp1b, grp2b = st.columns(2)
589
  sbs_cols = {"grp1": grp1b, "grp2": grp2b}
@@ -663,9 +665,9 @@ vectorstore = load_pinecone_vectorstore()
663
 
664
  query_rag_tab, query_rag_sbs_tab, guide_tab = st.tabs(
665
  [
666
- "query_rag",
667
- "query_rag_sbs",
668
- "guide",
669
  ]
670
  )
671
 
 
120
  nomic_url = f"{nomic_base_url}/{nomic_map_name}/map"
121
  hf_url = "https://huggingface.co/hyperdemocracy"
122
  pc_url = "https://www.pinecone.io/blog/serverless"
123
+ together_url = "https://www.together.ai/"
124
  st.subheader(":brain: About [hyperdemocracy](https://hyperdemocracy.us)")
125
  st.subheader(f":world_map: Visualize [nomic atlas]({nomic_url})")
126
  st.subheader(f":hugging_face: Raw [huggingface datasets]({hf_url})")
127
  st.subheader(f":evergreen_tree: Index [pinecone serverless]({pc_url})")
128
+ st.subheader(f":pancakes: Inference [together.ai]({together_url})")
129
 
130
 
131
  def group_docs(docs) -> list[tuple[str, list[Document]]]:
 
249
  This is a research project. The RAG technique helps to ground the LLM response by providing context from a trusted source, but it does not guarantee a high-quality response. We encourage you to play around: find questions that work and questions that fail. There is a small monthly budget dedicated to the OpenAI endpoints. Once that is used up each month, queries will no longer work.
250
 
251
 
252
+ ## Config
253
 
254
  Use the `Generative Config` to change LLM parameters.
255
  Use the `Retrieval Config` to change the number of chunks retrieved from our congress corpus and to apply various filters to the content before it is retrieved (e.g. filter to a specific set of congresses). Use the `Prompt Config` to try out different document formatting and prompting strategies.
 
445
 
446
  def render_query_rag_tab():
447
 
 
 
 
 
 
 
 
 
 
 
 
448
  QUERY_RAG_TEMPLATE = """You are an expert legislative analyst. Use the following excerpts from US congressional legislation to respond to the user's query. The excerpts are formatted as a JSON list. Each JSON object has "legis_id", "title", "introduced_date", "sponsor", and "snippets" keys. If a snippet is useful in writing part of your response, then cite the "legis_id", "title", "introduced_date", and "sponsor" in the response. If you don't know how to respond, just tell the user.
449
 
450
  ---
 
463
  ]
464
  )
465
 
466
+ key_prefix = "query_rag"
467
+ render_example_queries()
468
+
469
  with st.form(f"{key_prefix}|query_form"):
470
  st.text_area(
471
  "Enter a query that can be answered with congressional legislation:",
472
  key=f"{key_prefix}|query",
473
  )
474
+ col1, col2 = st.columns(2)
475
+ with col1:
476
+ with st.expander("Generative Config"):
477
+ render_generative_config(key_prefix)
478
+ with col2:
479
+ with st.expander("Retrieval Config"):
480
+ render_retrieval_config(key_prefix)
481
+
482
  query_submitted = st.form_submit_button("Submit")
483
 
484
  if query_submitted:
 
569
  )
570
  query_submitted = st.form_submit_button("Submit")
571
 
572
+ grp1a, grp2a = st.columns(2)
573
 
574
+ with grp1a:
575
+ st.header("Group 1")
576
+ key_prefix = f"{base_key_prefix}|grp1"
577
+ with st.expander("Generative Config"):
578
+ render_generative_config(key_prefix)
579
+ with st.expander("Retrieval Config"):
580
+ render_retrieval_config(key_prefix)
581
 
582
+ with grp2a:
583
+ st.header("Group 2")
584
+ key_prefix = f"{base_key_prefix}|grp2"
585
+ with st.expander("Generative Config"):
586
+ render_generative_config(key_prefix)
587
+ with st.expander("Retrieval Config"):
588
+ render_retrieval_config(key_prefix)
589
 
590
  grp1b, grp2b = st.columns(2)
591
  sbs_cols = {"grp1": grp1b, "grp2": grp2b}
 
665
 
666
  query_rag_tab, query_rag_sbs_tab, guide_tab = st.tabs(
667
  [
668
+ "RAG",
669
+ "RAG (side-by-side)",
670
+ "Guide",
671
  ]
672
  )
673