Spaces:
Sleeping
Sleeping
dolphinium
committed on
Commit
·
621afd7
1
Parent(s):
2cabd2d
add concurrent processing for visualization generation and update report streaming
Browse files
app.py
CHANGED
|
@@ -11,6 +11,7 @@ import seaborn as sns
|
|
| 11 |
import io
|
| 12 |
import os
|
| 13 |
import logging
|
|
|
|
| 14 |
from IPython.display import display, Markdown
|
| 15 |
|
| 16 |
|
|
@@ -637,29 +638,34 @@ def process_analysis_flow(user_input, history, state):
|
|
| 637 |
history.append((None, "✅ Data retrieved. Generating visualization..."))
|
| 638 |
yield (history, state, None, None, gr.update(value=formatted_query, visible=True), gr.update(value=formatted_data, visible=True))
|
| 639 |
|
| 640 |
-
|
| 641 |
-
|
| 642 |
-
|
| 643 |
-
|
| 644 |
-
|
| 645 |
-
history.append((None, "
|
| 646 |
-
|
| 647 |
-
|
| 648 |
-
|
| 649 |
-
|
| 650 |
-
|
| 651 |
-
|
| 652 |
-
|
| 653 |
-
|
| 654 |
-
|
| 655 |
-
|
| 656 |
-
|
| 657 |
-
|
| 658 |
-
report_text
|
| 659 |
-
|
| 660 |
-
|
| 661 |
-
|
| 662 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 663 |
|
| 664 |
# 6. Finalize and prompt for next action
|
| 665 |
state['query_count'] += 1
|
|
|
|
| 11 |
import io
|
| 12 |
import os
|
| 13 |
import logging
|
| 14 |
+
import concurrent.futures
|
| 15 |
from IPython.display import display, Markdown
|
| 16 |
|
| 17 |
|
|
|
|
| 638 |
history.append((None, "✅ Data retrieved. Generating visualization..."))
|
| 639 |
yield (history, state, None, None, gr.update(value=formatted_query, visible=True), gr.update(value=formatted_data, visible=True))
|
| 640 |
|
| 641 |
+
with concurrent.futures.ThreadPoolExecutor() as executor:
|
| 642 |
+
# Start visualization generation in the background
|
| 643 |
+
viz_future = executor.submit(llm_generate_visualization_code, query_context, facet_data)
|
| 644 |
+
|
| 645 |
+
# 5. Generate and Stream Report
|
| 646 |
+
history.append((None, "✅ Plot created. Streaming final report..."))
|
| 647 |
+
output_report = gr.update(value="", visible=True) # Make it visible before streaming
|
| 648 |
+
yield (history, state, None, output_report, gr.update(value=formatted_query, visible=True), gr.update(value=formatted_data, visible=True))
|
| 649 |
+
|
| 650 |
+
report_text = ""
|
| 651 |
+
# The history object is not modified during streaming, so we pass it once
|
| 652 |
+
# The yield statement for streaming only updates the report text
|
| 653 |
+
stream_history = history[:] # Make a copy
|
| 654 |
+
for chunk in llm_generate_summary_and_suggestions_stream(query_context, facet_data):
|
| 655 |
+
report_text += chunk
|
| 656 |
+
yield (stream_history, state, None, report_text, gr.update(value=formatted_query, visible=True), gr.update(value=formatted_data, visible=True))
|
| 657 |
+
|
| 658 |
+
# Update the main history with the final report text
|
| 659 |
+
history.append((None, report_text))
|
| 660 |
+
|
| 661 |
+
# Get the visualization code from the future
|
| 662 |
+
viz_code = viz_future.result()
|
| 663 |
+
plot_path = execute_viz_code_and_get_path(viz_code, facet_data)
|
| 664 |
+
|
| 665 |
+
output_plot = gr.update(value=plot_path, visible=True) if plot_path else gr.update(visible=False)
|
| 666 |
+
if not plot_path:
|
| 667 |
+
history.append((None, "*I was unable to generate a plot for this data.*\n"))
|
| 668 |
+
yield (history, state, output_plot, report_text, gr.update(value=formatted_query, visible=True), gr.update(value=formatted_data, visible=True))
|
| 669 |
|
| 670 |
# 6. Finalize and prompt for next action
|
| 671 |
state['query_count'] += 1
|