File size: 4,821 Bytes
c6c1ce5
 
 
 
 
 
 
bb5f164
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c6c1ce5
bb5f164
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c6c1ce5
 
bb5f164
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
import gradio as gr
import PyPDF2
import openai
from config import OPENAI_API_KEY
import os
# Prefer the environment variable, but fall back to the key imported from
# config — previously that import was unused and a missing env var left
# api_key silently set to None.
openai.api_key = os.getenv("OPENAI_API_KEY") or OPENAI_API_KEY



class PDFChat:
    """Stateful helper: loads a PDF's text once, then answers questions about it."""

    def __init__(self):
        # Full extracted text of the currently loaded PDF ("" until one is loaded).
        self.pdf_text = ""

    def extract_text_from_pdf(self, pdf_file):
        """Extract text from a PDF file and cache it on the instance.

        Args:
            pdf_file: Gradio file value (an object exposing a ``.name`` path),
                or None/falsy when nothing was uploaded.

        Returns:
            A human-readable status string for the UI.
        """
        if not pdf_file:
            return "Please upload a PDF file first."

        try:
            pages = []
            with open(pdf_file.name, "rb") as file:
                reader = PyPDF2.PdfReader(file)
                for page in reader.pages:
                    # extract_text() may return None (e.g. image-only pages);
                    # guard so we never try to concatenate None.
                    pages.append(page.extract_text() or "")
            # Single join instead of quadratic += accumulation; keep the
            # original trailing newline per page.
            self.pdf_text = "\n".join(pages) + "\n" if pages else ""
            return "PDF loaded successfully! You can now ask questions."
        except Exception as e:
            # Don't keep a partially-built or stale text on failure.
            self.pdf_text = ""
            return f"Error loading PDF: {str(e)}"

    def answer_question(self, question, chat_history):
        """Append the model's answer to *question* to the chat history.

        Args:
            question: The user's question; empty/None is a no-op.
            chat_history: List of (user, assistant) pairs; mutated in place
                and returned for Gradio.

        Returns:
            The updated chat history.
        """
        # Guard the empty question first so it never creates a history entry.
        if not question:
            return chat_history

        if not self.pdf_text:
            # Keep earlier turns instead of discarding the whole history.
            chat_history.append((question, "Please upload and load a PDF file first."))
            return chat_history

        # System context: assistant role plus the PDF text to ground answers on.
        messages = [
            {"role": "system", "content": "You are a helpful assistant that answers questions based on the PDF content."},
            {"role": "system", "content": f"PDF Content: {self.pdf_text}"},
        ]

        # Replay prior turns so the model sees the conversation so far.
        for human, assistant in chat_history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})

        # Current question goes last.
        messages.append({"role": "user", "content": question})

        try:
            response = openai.ChatCompletion.create(
                model="gpt-4-turbo",
                messages=messages
            )
            answer = response.choices[0].message['content']
            chat_history.append((question, answer))
        except Exception as e:
            # Surface the failure inside the chat rather than crashing the UI.
            chat_history.append((question, f"Error generating response: {str(e)}"))
        return chat_history

    def clear_history(self):
        """Clear conversation history (returns the empty list Gradio expects)."""
        return []

# Custom CSS injected into the Gradio page: center the layout at a max width
# and give the chat area a fixed, scrollable height.
css = """
.container {
    max-width: 800px;
    margin: auto;
}
.chat-window {
    height: 600px;
    overflow-y: auto;
}
"""

# Create PDF Chat instance
# Single shared instance: the extracted PDF text persists across UI callbacks.
pdf_chat = PDFChat()

# Create the Gradio interface. Statement order inside gr.Blocks defines the
# rendered layout, so components are declared top-to-bottom as displayed.
with gr.Blocks(css=css, theme='Taithrah/Minimal') as demo:
    gr.Markdown("# Renesas PDF Chatbot")
    
    # Row 1: PDF upload, explicit load button, and a read-only status line.
    with gr.Row():
        with gr.Column(scale=2):
            pdf_input = gr.File(
                label="Upload PDF",
                file_types=[".pdf"]
            )
            load_button = gr.Button("Load PDF")
            status_text = gr.Textbox(
                label="Status",
                interactive=False
            )
    
    # Row 2: the conversation display.
    with gr.Row():
        chatbot = gr.Chatbot(
            [],
            elem_id="chatbot",
            label="Chat History",
            height=400
        )
    
    # Row 3: question entry plus send/clear controls.
    with gr.Row():
        question_input = gr.Textbox(
            label="Ask a question",
            placeholder="What would you like to know about the PDF?",
            scale=4
        )
        submit_button = gr.Button("Send", scale=1)
        clear_button = gr.Button("Clear History", scale=1)
    
    # Example queries
    # NOTE(review): examples fill pdf_input/question_input but do NOT trigger
    # the Load PDF handler; the user must still click "Load PDF".
    gr.Examples(
        examples=[
            ["renesas-ra6m1-group-datasheet.pdf", "Which Renesas products are mentioned in this PDF?"],
            ["renesas-ra6m1-group-datasheet.pdf", "What are the key features of the microcontroller?"],
            ["renesas-ra6m1-group-datasheet.pdf", "Explain the power consumption specifications."]
        ],
        inputs=[pdf_input, question_input],
        label="Example Queries"
    )
    
    # Event handlers
    # Load button: extract and cache the PDF text, report status to the UI.
    load_button.click(
        pdf_chat.extract_text_from_pdf,
        inputs=[pdf_input],
        outputs=[status_text]
    )
    
    # Function to clear input after sending
    def clear_input():
        return ""
    
    # Enter key in the textbox: answer, then blank the input box.
    question_input.submit(
        pdf_chat.answer_question,
        inputs=[question_input, chatbot],
        outputs=[chatbot]
    ).then(
        clear_input,
        outputs=[question_input]
    )
    
    # Send button: same pipeline as pressing Enter.
    submit_button.click(
        pdf_chat.answer_question,
        inputs=[question_input, chatbot],
        outputs=[chatbot]
    ).then(
        clear_input,
        outputs=[question_input]
    )
    
    # Clear button: reset the chatbot display to an empty history.
    clear_button.click(
        pdf_chat.clear_history,
        outputs=[chatbot]
    )

# Launch the interface in debug mode (note: no share=True, so no public
# Gradio link is created despite what an older comment claimed).
if __name__ == "__main__":
    demo.launch(debug=True)