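"""Gradio front-end for ClimateQ&A.

Builds the retrieval agents (Pinecone vectorstores + reranker + LLM), connects
to the Azure file share passed to the chat stream, and assembles the UI tabs
defined in front.tabs.
"""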
# Import necessary libraries
import os
import gradio as gr
from azure.storage.fileshare import ShareServiceClient
# Import custom modules
from climateqa.engine.embeddings import get_embeddings_function
from climateqa.engine.llm import get_llm
from climateqa.engine.vectorstore import get_pinecone_vectorstore
from climateqa.engine.reranker import get_reranker
from climateqa.engine.graph import make_graph_agent, make_graph_agent_poc
from climateqa.engine.chains.retrieve_papers import find_papers
from climateqa.chat import start_chat, chat_stream, finish_chat
from climateqa.engine.talk_to_data.main import ask_vanna
from front.tabs import (create_config_modal, create_examples_tab, create_papers_tab, create_figures_tab, create_chat_interface, create_about_tab)
from front.utils import process_figures
from gradio_modal import Modal
from utils import create_user_id
import logging
logging.basicConfig(level=logging.WARNING)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppresses INFO and WARNING logs
logging.getLogger().setLevel(logging.WARNING)
# Load environment variables in local mode
try:
    from dotenv import load_dotenv
    load_dotenv()
except Exception:
    pass
# Set up Gradio Theme
theme = gr.themes.Base(
    primary_hue="blue",
    secondary_hue="red",
    font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
)
# Azure Blob Storage credentials
account_key = os.environ["BLOB_ACCOUNT_KEY"]
if len(account_key) == 86:
    # Restore the base64 padding if the trailing "==" was stripped from the key
    account_key += "=="

credential = {
    "account_key": account_key,
    "account_name": os.environ["BLOB_ACCOUNT_NAME"],
}

account_url = os.environ["BLOB_ACCOUNT_URL"]
file_share_name = "climateqa"
service = ShareServiceClient(account_url=account_url, credential=credential)
share_client = service.get_share_client(file_share_name)
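# User id forwarded to chat_stream together with the Azure share client.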
user_id = create_user_id()
# Create vectorstore and retriever
embeddings_function = get_embeddings_function()
vectorstore = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX"))
vectorstore_graphs = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX_OWID"), text_key="description")
vectorstore_region = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX_REGION"))
llm = get_llm(provider="openai", max_tokens=1024, temperature=0.0)
if os.environ["GRADIO_ENV"] == "local":
    reranker = get_reranker("nano")
else:
    reranker = get_reranker("large")
agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region=vectorstore_region, reranker=reranker, threshold_docs=0.2)
agent_poc = make_graph_agent_poc(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region=vectorstore_region, reranker=reranker, threshold_docs=0)  # TODO put back default 0.2
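# Async wrappers streaming events from the two agents: `chat` drives the main
# ClimateQ&A agent, `chat_poc` the Adapt'Action proof of concept.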
async def chat(query, history, audience, sources, reports, relevant_content_sources_selection, search_only):
    print("chat cqa - message received")
    async for event in chat_stream(agent, query, history, audience, sources, reports, relevant_content_sources_selection, search_only, share_client, user_id):
        yield event
async def chat_poc(query, history, audience, sources, reports, relevant_content_sources_selection, search_only):
    print("chat poc - message received")
    async for event in chat_stream(agent_poc, query, history, audience, sources, reports, relevant_content_sources_selection, search_only, share_client, user_id):
        yield event
# --------------------------------------------------------------------
# Gradio
# --------------------------------------------------------------------
# Function to update modal visibility
def update_config_modal_visibility(config_open):
    new_config_visibility_status = not config_open
    return Modal(visible=new_config_visibility_status), new_config_visibility_status
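# Recompute the counts shown in the tab labels by counting HTML markers
# ("<h2>" cards, "<iframe" embeds) in the rendered panels.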
def update_sources_number_display(sources_textbox, figures_cards, current_graphs, papers_html):
    sources_number = sources_textbox.count("<h2>")
    figures_number = figures_cards.count("<h2>")
    graphs_number = current_graphs.count("<iframe")
    papers_number = papers_html.count("<h2>")
    sources_notif_label = f"Sources ({sources_number})"
    figures_notif_label = f"Figures ({figures_number})"
    graphs_notif_label = f"Graphs ({graphs_number})"
    papers_notif_label = f"Papers ({papers_number})"
    recommended_content_notif_label = f"Recommended content ({figures_number + graphs_number + papers_number})"

    return gr.update(label=recommended_content_notif_label), gr.update(label=sources_notif_label), gr.update(label=figures_notif_label), gr.update(label=graphs_notif_label), gr.update(label=papers_notif_label)
# UI Layout Components
def cqa_tab(tab_name):
    # State variables
    current_graphs = gr.State([])

    with gr.Tab(tab_name):
        with gr.Row(elem_id="chatbot-row"):
            # Left column - Chat interface
            with gr.Column(scale=2):
                chatbot, textbox, config_button = create_chat_interface(tab_name)

            # Right column - Content panels
            with gr.Column(scale=2, variant="panel", elem_id="right-panel"):
                with gr.Tabs(elem_id="right_panel_tab") as tabs:
                    # Examples tab
                    with gr.TabItem("Examples", elem_id="tab-examples", id=0):
                        examples_hidden = create_examples_tab()

                    # Sources tab
                    with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
                        sources_textbox = gr.HTML(show_label=False, elem_id="sources-textbox")

                    # Recommended content tab
                    with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=2) as tab_recommended_content:
                        with gr.Tabs(elem_id="group-subtabs") as tabs_recommended_content:
                            # Figures subtab
                            with gr.Tab("Figures", elem_id="tab-figures", id=3) as tab_figures:
                                sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal = create_figures_tab()

                            # Papers subtab
                            with gr.Tab("Papers", elem_id="tab-citations", id=4) as tab_papers:
                                papers_direct_search, papers_summary, papers_html, citations_network, papers_modal = create_papers_tab()

                            # Graphs subtab
                            with gr.Tab("Graphs", elem_id="tab-graphs", id=5) as tab_graphs:
                                graphs_container = gr.HTML(
                                    "<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>",
                                    elem_id="graphs-container"
                                )

                    # DRIAS (talk-to-data) tab
                    with gr.Tab("DRIAS", elem_id="tab-vanna", id=6) as tab_vanna:
                        vanna_direct_question = gr.Textbox(label="Direct Question", placeholder="You can write a direct question here", elem_id="direct-question", interactive=True)
                        with gr.Accordion("Details", elem_id="vanna-details", open=False) as vanna_details:
                            vanna_sql_query = gr.Textbox(label="SQL Query Used", elem_id="sql-query", interactive=False)
                            show_vanna_table = gr.Button("Show Table", elem_id="show-table")
                            with Modal(visible=False) as vanna_table_modal:
                                vanna_table = gr.DataFrame([], elem_id="vanna-table")
                                close_vanna_modal = gr.Button("Close", elem_id="close-vanna-modal")
                                close_vanna_modal.click(lambda: Modal(visible=False), None, [vanna_table_modal])
                            show_vanna_table.click(lambda: Modal(visible=True), None, [vanna_table_modal])
                        vanna_display = gr.Plot()
                        vanna_direct_question.submit(ask_vanna, [vanna_direct_question], [vanna_sql_query, vanna_table, vanna_display])

    return {
        "chatbot": chatbot,
        "textbox": textbox,
        "tabs": tabs,
        "sources_raw": sources_raw,
        "new_figures": new_figures,
        "current_graphs": current_graphs,
        "examples_hidden": examples_hidden,
        "sources_textbox": sources_textbox,
        "figures_cards": figures_cards,
        "gallery_component": gallery_component,
        "config_button": config_button,
        "papers_direct_search": papers_direct_search,
        "papers_html": papers_html,
        "citations_network": citations_network,
        "papers_summary": papers_summary,
        "tab_recommended_content": tab_recommended_content,
        "tab_sources": tab_sources,
        "tab_figures": tab_figures,
        "tab_graphs": tab_graphs,
        "tab_papers": tab_papers,
        "graph_container": graphs_container,
        "vanna_sql_query": vanna_sql_query,
        "vanna_table": vanna_table,
        "vanna_display": vanna_display,
    }
def event_handling(
    main_tab_components,
    config_components,
    tab_name="ClimateQ&A"
):
    chatbot = main_tab_components["chatbot"]
    textbox = main_tab_components["textbox"]
    tabs = main_tab_components["tabs"]
    sources_raw = main_tab_components["sources_raw"]
    new_figures = main_tab_components["new_figures"]
    current_graphs = main_tab_components["current_graphs"]
    examples_hidden = main_tab_components["examples_hidden"]
    sources_textbox = main_tab_components["sources_textbox"]
    figures_cards = main_tab_components["figures_cards"]
    gallery_component = main_tab_components["gallery_component"]
    config_button = main_tab_components["config_button"]
    papers_direct_search = main_tab_components["papers_direct_search"]
    papers_html = main_tab_components["papers_html"]
    citations_network = main_tab_components["citations_network"]
    papers_summary = main_tab_components["papers_summary"]
    tab_recommended_content = main_tab_components["tab_recommended_content"]
    tab_sources = main_tab_components["tab_sources"]
    tab_figures = main_tab_components["tab_figures"]
    tab_graphs = main_tab_components["tab_graphs"]
    tab_papers = main_tab_components["tab_papers"]
    graphs_container = main_tab_components["graph_container"]
    vanna_sql_query = main_tab_components["vanna_sql_query"]
    vanna_table = main_tab_components["vanna_table"]
    vanna_display = main_tab_components["vanna_display"]

    config_open = config_components["config_open"]
    config_modal = config_components["config_modal"]
    dropdown_sources = config_components["dropdown_sources"]
    dropdown_reports = config_components["dropdown_reports"]
    dropdown_external_sources = config_components["dropdown_external_sources"]
    search_only = config_components["search_only"]
    dropdown_audience = config_components["dropdown_audience"]
    after = config_components["after"]
    output_query = config_components["output_query"]
    output_language = config_components["output_language"]
    close_config_modal = config_components["close_config_modal_button"]

    new_sources_html = gr.State([])
    ttd_data = gr.State([])

    # Toggle the config modal from both the open and the close buttons
    for button in [config_button, close_config_modal]:
        button.click(
            fn=update_config_modal_visibility,
            inputs=[config_open],
            outputs=[config_modal, config_open]
        )

    if tab_name == "ClimateQ&A":
        print("chat cqa - message sent")

        # Event for textbox
        (textbox
            .submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{textbox.elem_id}")
            .then(chat, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_html, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{textbox.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{textbox.elem_id}")
        )

        # Event for examples_hidden
        (examples_hidden
            .change(start_chat, [examples_hidden, chatbot, search_only], [examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
            .then(chat, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_html, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
        )

    elif tab_name == "Beta - POC Adapt'Action":
        print("chat poc - message sent")

        # Event for textbox
        (textbox
            .submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{textbox.elem_id}")
            .then(chat_poc, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_html, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{textbox.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{textbox.elem_id}")
        )

        # Event for examples_hidden
        (examples_hidden
            .change(start_chat, [examples_hidden, chatbot, search_only], [examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
            .then(chat_poc, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_html, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
        )

    new_sources_html.change(lambda x: x, inputs=[new_sources_html], outputs=[sources_textbox])
    current_graphs.change(lambda x: x, inputs=[current_graphs], outputs=[graphs_container])
    new_figures.change(process_figures, inputs=[sources_raw, new_figures], outputs=[sources_raw, figures_cards, gallery_component])

    # Update sources numbers
    for component in [sources_textbox, figures_cards, current_graphs, papers_html]:
        component.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])

    # Search for papers
    for component in [textbox, examples_hidden, papers_direct_search]:
        component.submit(find_papers, [component, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])

    if tab_name == "Beta - POC Adapt'Action":
        # DRIAS search
        textbox.submit(ask_vanna, [textbox], [vanna_sql_query, vanna_table, vanna_display])
def main_ui():
    # config_open = gr.State(True)
    with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd() + "/style.css", theme=theme, elem_id="main-component") as demo:
        config_components = create_config_modal()

        with gr.Tabs():
            cqa_components = cqa_tab(tab_name="ClimateQ&A")
            local_cqa_components = cqa_tab(tab_name="Beta - POC Adapt'Action")
            create_about_tab()

        event_handling(cqa_components, config_components, tab_name="ClimateQ&A")
        event_handling(local_cqa_components, config_components, tab_name="Beta - POC Adapt'Action")

        demo.queue()

    return demo
demo = main_ui()
demo.launch(ssr_mode=False)