import gradio as gr
from gradio_client import Client, handle_file
import os
# Hugging Face API token, read from the environment (set HF_TOKEN before running).
HF_TOKEN = os.getenv("HF_TOKEN") # Falls back to None if the variable is unset; the Space must then be public.
# Remote Gradio Space that hosts the election-analysis agent; all queries are proxied to it.
client = Client("mangoesai/Elections_Comparison_Agent_V4", hf_token=HF_TOKEN)
# client_name = ['2016 Election','2024 Election', 'Comparison two years']
def stream_chat_with_rag(
    message: str,
    client_name: str
):
    """Send a question to the remote Elections Comparison Agent and return its reply.

    NOTE(review): despite the name, this does not stream — it is a single
    blocking call to the Space's ``/process_query`` endpoint.

    Args:
        message: The user's question about election-related posts/comments.
        client_name: Election-year selection ("2016 Election", "2024 Election",
            or "Comparison two years"), forwarded as ``election_year``.

    Returns:
        Tuple ``(answer, fig)``: the text answer and the topic-distribution
        figure produced by the remote endpoint.
    """
    answer, fig = client.predict(
        query=message,
        election_year=client_name,
        api_name="/process_query"
    )
    # Debugging aid: log the raw response from the remote API.
    print("Raw answer from API:")
    print(answer)
    return answer, fig
# Create Gradio interface
# Build the Gradio UI: a two-column layout (inputs left, text answer right)
# with a full-width plot row underneath.
with gr.Blocks(title="Reddit Election Analysis") as demo:
    gr.Markdown("# Reddit Public sentiment & Social topic distribution ")
    gr.Markdown("# Reddit Election Posts/Comments Analysis")
    gr.Markdown("Ask questions about election-related comments and posts")
    with gr.Row():
        with gr.Column():
            # Which dataset/agent mode to query; forwarded to the remote API.
            year_selector = gr.Radio(
                choices=["2016 Election", "2024 Election", "Comparison two years"],
                label="Select Election Year",
                value="2016 Election"
            )
            # Free-form question for the agent.
            query_input = gr.Textbox(
                label="Your Question",
                placeholder="Ask about election comments or posts..."
            )
            submit_btn = gr.Button("Submit")
            gr.Markdown("""
            ## Example Questions:
            - Is there any comments don't like the election results
            - Summarize the main discussions about voting process
            - What are the common opinions about candidates?
            """)
        with gr.Column():
            # Text answer returned by the agent.
            output_text = gr.Textbox(
                label="Response",
                lines=20
            )
    with gr.Row():
        # Topic-distribution figure returned alongside the answer.
        output_plot = gr.Plot(
            label="Topic Distribution",
            container=True,  # Ensures the plot is contained within its area
            elem_classes="topic-plot"  # Custom class targeted by the CSS below
        )
    # Add custom CSS to ensure proper plot sizing
    gr.HTML("""
        <style>
        .topic-plot {
            min-height: 600px;
            width: 100%;
            margin: auto;
        }
        </style>
    """)
    # Wire the button: one click updates both the text answer and the plot.
    submit_btn.click(
        fn=stream_chat_with_rag,
        inputs=[query_input, year_selector],
        outputs=[output_text, output_plot]
    )
if __name__ == "__main__":
    # Launch with a public share link. The original second call,
    # `demo.launch(share_erro=True)`, was removed: launch() blocks until
    # shutdown, and the misspelled keyword would raise TypeError anyway.
    demo.launch(share=True)