# renesas_chatbot / app.py
import gradio as gr
import PyPDF2
import openai
import os

# Read the API key from the environment (set OPENAI_API_KEY before launching)
openai.api_key = os.getenv("OPENAI_API_KEY")
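

# PDFChat keeps the extracted PDF text in memory and injects it as system
# context into every chat completion request.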
class PDFChat:
    def __init__(self):
        self.pdf_text = ""

    def extract_text_from_pdf(self, pdf_file):
        """Extract text from the uploaded PDF file and store it."""
        if not pdf_file:
            return "Please upload a PDF file first."
        try:
            self.pdf_text = ""  # Clear previous content
            with open(pdf_file.name, "rb") as file:
                reader = PyPDF2.PdfReader(file)
                for page in reader.pages:
                    # extract_text() can return None for pages with no extractable text
                    self.pdf_text += (page.extract_text() or "") + "\n"
            return "PDF loaded successfully! You can now ask questions."
        except Exception as e:
            return f"Error loading PDF: {str(e)}"

    def answer_question(self, question, chat_history):
        """Generate an answer based on the PDF content and conversation history."""
        if not self.pdf_text:
            return [[question, "Please upload and load a PDF file first."]]
        if not question:
            return chat_history

        # Construct the conversation context
        messages = [
            {"role": "system", "content": "You are a helpful assistant that answers questions based on the PDF content."},
            {"role": "system", "content": f"PDF Content: {self.pdf_text}"}
        ]
        # Add conversation history
        for human, assistant in chat_history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})
        # Add the current question
        messages.append({"role": "user", "content": question})

        try:
            response = openai.ChatCompletion.create(
                model="gpt-4-turbo",
                messages=messages
            )
            answer = response.choices[0].message['content']
            # Update chat history with the new question and answer
            chat_history.append((question, answer))
            return chat_history
        except Exception as e:
            error_message = f"Error generating response: {str(e)}"
            chat_history.append((question, error_message))
            return chat_history

    def clear_history(self):
        """Clear the conversation history."""
        return []
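

# Custom CSS passed to gr.Blocks: a centered, width-limited container and a
# scrollable chat window region.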
css = """
.container {
    max-width: 800px;
    margin: auto;
}
.chat-window {
    height: 600px;
    overflow-y: auto;
}
"""
# Create PDF Chat instance
pdf_chat = PDFChat()
# Create the Gradio interface
with gr.Blocks(css=css, theme='Taithrah/Minimal') as demo:
    gr.Markdown("# Renesas PDF Chatbot")

    with gr.Row():
        with gr.Column(scale=2):
            pdf_input = gr.File(
                label="Upload PDF",
                file_types=[".pdf"]
            )
            load_button = gr.Button("Load PDF")
            status_text = gr.Textbox(
                label="Status",
                interactive=False
            )

    with gr.Row():
        chatbot = gr.Chatbot(
            [],
            elem_id="chatbot",
            label="Chat History",
            height=400
        )

    with gr.Row():
        question_input = gr.Textbox(
            label="Ask a question",
            placeholder="What would you like to know about the PDF?",
            scale=4
        )
        submit_button = gr.Button("Send", scale=1)
        clear_button = gr.Button("Clear History", scale=1)
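
    # Clicking an example pre-fills the PDF and question inputs; the sample
    # datasheet is assumed to be bundled alongside app.py.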
    # Example queries
    gr.Examples(
        examples=[
            ["renesas-ra6m1-group-datasheet.pdf", "Which Renesas products are mentioned in this PDF?"],
            ["renesas-ra6m1-group-datasheet.pdf", "What are the key features of the microcontroller?"],
            ["renesas-ra6m1-group-datasheet.pdf", "Explain the power consumption specifications."]
        ],
        inputs=[pdf_input, question_input],
        label="Example Queries"
    )
    # Event handlers
    load_button.click(
        pdf_chat.extract_text_from_pdf,
        inputs=[pdf_input],
        outputs=[status_text]
    )

    # Function to clear the input box after sending
    def clear_input():
        return ""

    question_input.submit(
        pdf_chat.answer_question,
        inputs=[question_input, chatbot],
        outputs=[chatbot]
    ).then(
        clear_input,
        outputs=[question_input]
    )

    submit_button.click(
        pdf_chat.answer_question,
        inputs=[question_input, chatbot],
        outputs=[chatbot]
    ).then(
        clear_input,
        outputs=[question_input]
    )

    clear_button.click(
        pdf_chat.clear_history,
        outputs=[chatbot]
    )
# Launch the interface
if __name__ == "__main__":
    demo.launch(debug=True)
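

# To run locally (assuming gradio, PyPDF2, and a pre-1.0 openai package that
# still provides openai.ChatCompletion are installed):
#   OPENAI_API_KEY=sk-... python app.py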