File size: 7,201 Bytes
c6c1ce5
 
 
 
28a92ae
 
 
c6c1ce5
bb5f164
28a92ae
73c10b7
bb5f164
 
 
28a92ae
 
 
 
bb5f164
 
 
 
 
 
28a92ae
bb5f164
 
 
 
 
 
 
28a92ae
 
 
 
 
 
 
 
bb5f164
28a92ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb5f164
28a92ae
bb5f164
28a92ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb5f164
 
28a92ae
 
c6c1ce5
bb5f164
28a92ae
 
 
 
bb5f164
 
 
 
 
 
 
 
 
 
28a92ae
 
bb5f164
28a92ae
 
 
 
 
 
 
 
 
 
 
bb5f164
 
28a92ae
bb5f164
 
 
28a92ae
 
bb5f164
 
28a92ae
 
bb5f164
 
 
 
28a92ae
 
 
bb5f164
 
 
 
28a92ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb5f164
28a92ae
bb5f164
 
 
 
c6c1ce5
 
28a92ae
 
 
 
bb5f164
28a92ae
 
 
bb5f164
 
28a92ae
 
 
 
 
 
 
 
bb5f164
 
28a92ae
 
 
 
bb5f164
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
import gradio as gr
import PyPDF2
import openai
from config import OPENAI_API_KEY
import pandas as pd
import json
import re
import os

# Configure the OpenAI client from the environment at import time.
# NOTE(review): OPENAI_API_KEY is also imported from config above but is not
# used here — confirm whether the env var or config.py is the intended source.
openai.api_key = os.getenv("OPENAI_API_KEY")
 
class PDFChat:
    """Chat backend that answers microcontroller-selection questions.

    Holds the text of an optionally uploaded PDF datasheet plus the running
    chat history, and turns OpenAI responses into (explanation, DataFrame)
    pairs when the model returns a JSON comparison table.
    """

    def __init__(self):
        # Raw text of the currently loaded PDF ("" when none is loaded).
        self.pdf_text = ""
        # List of (question, answer) pairs replayed as conversation context.
        self.chat_history = []
        # Base instructions sent as the system message on every request.
        self.system_prompt = """You are a knowledgeable assistant specializing in microcontrollers from Renesas, TI, and STM. 
        When comparing microcontrollers, always provide structured data in a JSON format that can be converted to a table.
        Focus on key specifications like CPU frequency, memory, peripherals, ADC Resolution , Flash Memory ,temperature range, and special features."""

    def extract_text_from_pdf(self, pdf_file):
        """Load all page text from *pdf_file* into ``self.pdf_text``.

        Args:
            pdf_file: a Gradio file object exposing ``.name`` (a filesystem
                path), or a falsy value when nothing was uploaded.

        Returns:
            A human-readable status string for the UI.
        """
        if not pdf_file:
            return "Please upload a PDF file first."

        try:
            # Clear any previously loaded document before reading the new one
            # (also leaves pdf_text empty if this load fails).
            self.pdf_text = ""
            with open(pdf_file.name, "rb") as file:
                reader = PyPDF2.PdfReader(file)
                # Fix: extract_text() may yield None/empty for image-only
                # pages; coerce to "" so concatenation cannot raise TypeError.
                # join() avoids quadratic += string building as well.
                self.pdf_text = "".join(
                    (page.extract_text() or "") + "\n" for page in reader.pages
                )
            return "PDF loaded successfully! You can now ask questions."
        except Exception as e:
            return f"Error loading PDF: {str(e)}"

    def clear_pdf(self):
        """Forget the loaded PDF text and return a status string."""
        self.pdf_text = ""
        return "PDF content cleared."

    def clear_chat_history(self):
        """Reset the conversation; returns blanks for the two output widgets."""
        self.chat_history = []
        return "", None

    def extract_json_from_text(self, text):
        """Extract JSON data from the response text.

        Prefers a fenced ```json ... ``` block; otherwise falls back to the
        outermost { ... } span. Returns the parsed object, or None when no
        parseable JSON is present.
        """
        # Find JSON pattern between ```json and ```
        json_match = re.search(r'```json\s*(.*?)\s*```', text, re.DOTALL)
        if json_match:
            json_str = json_match.group(1)
        else:
            # Try to find JSON pattern between { and } (greedy: first "{"
            # through the last "}" in the text).
            json_match = re.search(r'({[\s\S]*})', text)
            if json_match:
                json_str = json_match.group(1)
            else:
                return None

        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            # Malformed JSON is treated the same as no JSON at all.
            return None

    def answer_question(self, question):
        """Ask the model *question* with PDF + history context.

        Returns:
            (explanation_text, comparison_df) where comparison_df is a
            pandas DataFrame when the reply contained a "comparison_table",
            else None. On API failure returns (error_message, None) without
            touching the history.
        """
        if not question:
            return "", None

        # Formatting contract appended as a second system message; the exact
        # wording is part of the prompt and must not be altered casually.
        structured_prompt = """
        If the question is asking for a comparison or suggestion of microcontrollers, 
        provide your response in the following JSON format wrapped in ```json ```:
        {
            "explanation": "Your textual explanation here",
            "comparison_table": [
                {
                    "Feature": "feature name",
                    "Controller1_Name": "value",
                    "Controller2_Name": "value",
                    ...
                },
                ...
            ]
        }
        """

        messages = [
            {"role": "system", "content": self.system_prompt},
            {"role": "system", "content": structured_prompt}
        ]

        # Ground the model in the uploaded datasheet, if any.
        if self.pdf_text:
            messages.append({"role": "system", "content": f"PDF Content: {self.pdf_text}"})

        # Replay prior turns so the conversation stays coherent.
        for human, assistant in self.chat_history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})

        messages.append({"role": "user", "content": question})

        try:
            # NOTE(review): openai.ChatCompletion is the pre-1.0 SDK surface;
            # confirm the pinned openai version before modernizing this call.
            response = openai.ChatCompletion.create(
                model="gpt-4-turbo",
                messages=messages
            )
            response_text = response.choices[0].message['content']

            json_data = self.extract_json_from_text(response_text)

            if json_data and "comparison_table" in json_data:
                # Structured reply: tabulate it and prefer the model's own
                # explanation text when present.
                df = pd.DataFrame(json_data["comparison_table"])
                explanation = json_data.get('explanation', response_text)
                self.chat_history.append((question, explanation))
                return explanation, df
            else:
                # Plain-text reply: pass it through with no table.
                self.chat_history.append((question, response_text))
                return response_text, None

        except Exception as e:
            error_message = f"Error generating response: {str(e)}"
            return error_message, None

# Single shared backend instance for all UI callbacks below.
pdf_chat = PDFChat()

with gr.Blocks() as demo:
    # Fix: page title previously misspelled the brand as "Renasus"
    # (the system prompt above spells it "Renesas").
    gr.Markdown("# Renesas Chatbot")

    with gr.Row():
        # Left column: PDF upload / load / clear controls plus a status line.
        with gr.Column(scale=1):
            gr.Markdown("### PDF Controls")
            pdf_input = gr.File(
                label="Upload PDF",
                file_types=[".pdf"]
            )
            with gr.Row():
                load_button = gr.Button("Load PDF")
                clear_pdf_button = gr.Button("Clear PDF")
            status_text = gr.Textbox(
                label="Status",
                interactive=False
            )

        # Right column: question box, textual answer, comparison table.
        with gr.Column(scale=2):
            gr.Markdown("### Microcontroller Selection Interface")
            question_input = gr.Textbox(
                label="Ask about microcontroller selection",
                placeholder="Describe your requirements or ask for comparisons...",
                lines=3
            )
            explanation_text = gr.Textbox(
                label="Explanation",
                interactive=False,
                lines=4
            )
            table_output = gr.DataFrame(
                label="Comparison Table",
                interactive=False,
                wrap=True
            )
            with gr.Row():
                submit_button = gr.Button("Send")
                clear_history_button = gr.Button("Clear Chat History")

            with gr.Group():
                gr.Markdown("### Example Questions")
                gr.Examples(
                    examples=[
                        ["Suggest controller suitable for water level monitoring system comparing RA4M1 and STM32L4"],
                        ["Recommend controller for centralized vehicle lighting and door control systems comparing RA6M3 and STM32F4"],
                        ["Suggest best suited controller for a Solar Inverter Design comparing RA6T1 and TMS320F28379D"],
                        ["Compare RA6M5 and STM32G4 series for building automation applications"],
                    ],
                    inputs=[question_input],
                    label="Example Questions"
                )

    # --- Event wiring ---------------------------------------------------
    load_button.click(
        pdf_chat.extract_text_from_pdf,
        inputs=[pdf_input],
        outputs=[status_text]
    )

    clear_pdf_button.click(
        pdf_chat.clear_pdf,
        outputs=[status_text]
    )

    clear_history_button.click(
        pdf_chat.clear_chat_history,
        outputs=[explanation_text, table_output]
    )

    def handle_question(question):
        """Answer *question* and clear the input box afterwards."""
        explanation, df = pdf_chat.answer_question(question)
        # Third value "" resets question_input after submission.
        return explanation, df, ""

    # Enter key and Send button share the same handler.
    question_input.submit(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )

    submit_button.click(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )

if __name__ == "__main__":
    demo.launch(debug=True)