# renesas_chatbot / app.py
# Source: Hugging Face Space by arjunanand13 — "Update app.py",
# revision 73c10b7 (verified), 7.2 kB. (Raw-view page header preserved
# here as a comment so the file remains valid Python.)
import gradio as gr
import PyPDF2
import openai
from config import OPENAI_API_KEY
import pandas as pd
import json
import re
import os
# Read the API key from the environment at import time.
# NOTE(review): OPENAI_API_KEY is also imported from config above but is not
# used here — confirm which source is intended; the env var wins as written,
# and a missing env var silently sets the key to None.
openai.api_key = os.getenv("OPENAI_API_KEY")
class PDFChat:
    """Stateful chat assistant for microcontroller selection questions.

    Holds an optional PDF knowledge base plus a running chat history, queries
    the OpenAI chat API, and extracts structured JSON comparison tables from
    the model's replies so the UI can render them as a DataFrame.
    """

    def __init__(self):
        # Extracted text of the currently loaded PDF ("" when none is loaded).
        self.pdf_text = ""
        # List of (question, answer) tuples, replayed into every API request.
        self.chat_history = []
        self.system_prompt = """You are a knowledgeable assistant specializing in microcontrollers from Renesas, TI, and STM.
When comparing microcontrollers, always provide structured data in a JSON format that can be converted to a table.
Focus on key specifications like CPU frequency, memory, peripherals, ADC Resolution , Flash Memory ,temperature range, and special features."""

    def extract_text_from_pdf(self, pdf_file):
        """Load *pdf_file* (a Gradio file object) and cache its full text.

        Returns a human-readable status string for the UI status box.
        """
        if not pdf_file:
            return "Please upload a PDF file first."
        try:
            self.pdf_text = ""
            with open(pdf_file.name, "rb") as file:
                reader = PyPDF2.PdfReader(file)
                for page in reader.pages:
                    # PyPDF2's extract_text() can return None for pages with
                    # no extractable text (e.g. scanned images); guard so the
                    # concatenation cannot raise TypeError.
                    self.pdf_text += (page.extract_text() or "") + "\n"
            return "PDF loaded successfully! You can now ask questions."
        except Exception as e:
            return f"Error loading PDF: {str(e)}"

    def clear_pdf(self):
        """Drop the cached PDF text; returns a status string for the UI."""
        self.pdf_text = ""
        return "PDF content cleared."

    def clear_chat_history(self):
        """Reset the conversation; returns blanks for (explanation, table)."""
        self.chat_history = []
        return "", None

    def extract_json_from_text(self, text):
        """Extract a JSON object embedded in *text*.

        Prefers a ```json ... ``` fenced block; otherwise falls back to the
        widest ``{...}`` span. Returns the parsed object, or None when no
        candidate is found or it fails to parse.
        """
        json_match = re.search(r'```json\s*(.*?)\s*```', text, re.DOTALL)
        if json_match:
            json_str = json_match.group(1)
        else:
            # Greedy match deliberately spans nested braces.
            json_match = re.search(r'({[\s\S]*})', text)
            if json_match:
                json_str = json_match.group(1)
            else:
                return None
        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            return None

    def answer_question(self, question):
        """Answer *question* using the PDF context and prior chat turns.

        Returns ``(explanation_text, dataframe_or_None)``; the DataFrame is
        present only when the model emitted a ``comparison_table`` JSON block.
        """
        if not question:
            return "", None
        # Instructs the model to wrap comparison answers in parseable JSON.
        structured_prompt = """
If the question is asking for a comparison or suggestion of microcontrollers,
provide your response in the following JSON format wrapped in ```json ```:
{
    "explanation": "Your textual explanation here",
    "comparison_table": [
        {
            "Feature": "feature name",
            "Controller1_Name": "value",
            "Controller2_Name": "value",
            ...
        },
        ...
    ]
}
"""
        messages = [
            {"role": "system", "content": self.system_prompt},
            {"role": "system", "content": structured_prompt},
        ]
        if self.pdf_text:
            messages.append({"role": "system", "content": f"PDF Content: {self.pdf_text}"})
        # Replay the whole conversation so the model keeps context.
        for human, assistant in self.chat_history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})
        messages.append({"role": "user", "content": question})
        try:
            # Legacy (pre-1.0) openai SDK interface, consistent with the
            # module-level `openai.api_key` assignment above.
            response = openai.ChatCompletion.create(
                model="gpt-4-turbo",
                messages=messages
            )
            response_text = response.choices[0].message['content']
            json_data = self.extract_json_from_text(response_text)
            if json_data and "comparison_table" in json_data:
                df = pd.DataFrame(json_data["comparison_table"])
                explanation = json_data.get('explanation', response_text)
                self.chat_history.append((question, explanation))
                return explanation, df
            else:
                self.chat_history.append((question, response_text))
                return response_text, None
        except Exception as e:
            # Surface the failure to the UI; deliberately not added to history.
            error_message = f"Error generating response: {str(e)}"
            return error_message, None
# Single shared chat instance; all UI callbacks mutate its state.
pdf_chat = PDFChat()

with gr.Blocks() as demo:
    # Typo fix: the product/repo name is "Renesas" (was "Renasus").
    gr.Markdown("# Renesas Chatbot")

    with gr.Row():
        # Left column: PDF upload / load / clear controls.
        with gr.Column(scale=1):
            gr.Markdown("### PDF Controls")
            pdf_input = gr.File(
                label="Upload PDF",
                file_types=[".pdf"]
            )
            with gr.Row():
                load_button = gr.Button("Load PDF")
                clear_pdf_button = gr.Button("Clear PDF")
            status_text = gr.Textbox(
                label="Status",
                interactive=False
            )

        # Right column: question input plus explanation + comparison table.
        with gr.Column(scale=2):
            gr.Markdown("### Microcontroller Selection Interface")
            question_input = gr.Textbox(
                label="Ask about microcontroller selection",
                placeholder="Describe your requirements or ask for comparisons...",
                lines=3
            )
            explanation_text = gr.Textbox(
                label="Explanation",
                interactive=False,
                lines=4
            )
            table_output = gr.DataFrame(
                label="Comparison Table",
                interactive=False,
                wrap=True
            )
            with gr.Row():
                submit_button = gr.Button("Send")
                clear_history_button = gr.Button("Clear Chat History")

    with gr.Group():
        gr.Markdown("### Example Questions")
        gr.Examples(
            examples=[
                ["Suggest controller suitable for water level monitoring system comparing RA4M1 and STM32L4"],
                ["Recommend controller for centralized vehicle lighting and door control systems comparing RA6M3 and STM32F4"],
                ["Suggest best suited controller for a Solar Inverter Design comparing RA6T1 and TMS320F28379D"],
                ["Compare RA6M5 and STM32G4 series for building automation applications"],
            ],
            inputs=[question_input],
            label="Example Questions"
        )

    # Wire the PDF controls to the shared PDFChat instance.
    load_button.click(
        pdf_chat.extract_text_from_pdf,
        inputs=[pdf_input],
        outputs=[status_text]
    )
    clear_pdf_button.click(
        pdf_chat.clear_pdf,
        outputs=[status_text]
    )
    clear_history_button.click(
        pdf_chat.clear_chat_history,
        outputs=[explanation_text, table_output]
    )

    def handle_question(question):
        """Answer the question and clear the input box afterwards."""
        explanation, df = pdf_chat.answer_question(question)
        return explanation, df, ""

    # Both Enter-in-textbox and the Send button trigger the same handler.
    question_input.submit(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )
    submit_button.click(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )

if __name__ == "__main__":
    demo.launch(debug=True)