2001muhammadumair committed
Commit 7213a62 · verified · 1 Parent(s): f72a451

Create app.py

Files changed (1)
  1. app.py +213 -0
app.py ADDED
@@ -0,0 +1,213 @@
+ import os
+ import gradio as gr
+ import logging
+ from groq import Groq
+ from sentence_transformers import SentenceTransformer
+ import faiss
+ import numpy as np
+ import PyPDF2
+ from sklearn.metrics.pairwise import cosine_similarity
+ from collections import Counter
+
+ # --------------------- Setup ---------------------
+
+ logging.basicConfig(
+     filename='query_logs.log',
+     level=logging.INFO,
+     format='%(asctime)s:%(levelname)s:%(message)s'
+ )
+
+ # Read the Groq API key from the environment so the secret is not committed to the repository.
+ GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
+ client = Groq(api_key=GROQ_API_KEY)
+
+ PDF_PATH = 'Robert Ciesla - The Book of Chatbots_ From ELIZA to ChatGPT-Springer (2024).pdf'
+ sentence_transformer_model = SentenceTransformer('all-MiniLM-L6-v2')
+ cache = {}
+
+ # --------------------- PDF Processing ---------------------
+
+ def read_pdf(file_path):
+     if not os.path.exists(file_path):
+         logging.error(f"PDF file not found at: {file_path}")
+         return []
+
+     sentences_with_pages = []
+     with open(file_path, 'rb') as file:
+         reader = PyPDF2.PdfReader(file)
+         for page_num, page in enumerate(reader.pages):
+             text = page.extract_text()
+             if text:
+                 sentences = [sentence.strip() for sentence in text.split('\n') if sentence.strip()]
+                 for sentence in sentences:
+                     sentences_with_pages.append({'sentence': sentence, 'page_number': page_num + 1})
+     return sentences_with_pages
+
+ def vectorize_text(sentences_with_pages):
+     try:
+         sentences = [item['sentence'] for item in sentences_with_pages]
+         embeddings = sentence_transformer_model.encode(sentences, show_progress_bar=True)
+         index = faiss.IndexFlatL2(embeddings.shape[1])
+         index.add(np.array(embeddings))
+         logging.info(f"Added {len(sentences)} sentences to the vector store.")
+         return index, sentences_with_pages
+     except Exception as e:
+         logging.error(f"Error during vectorization: {str(e)}")
+         return None, None
+
+ # Build the FAISS index once at startup; both helpers above must be defined before these calls run.
+ sentences_with_pages = read_pdf(PDF_PATH)
+ vector_index, sentences_with_pages = vectorize_text(sentences_with_pages)
+
+ # --------------------- Query Handling ---------------------
+
+ def generate_query_embedding(query):
+     return sentence_transformer_model.encode([query])
+
+ def is_query_relevant(distances, threshold=1.0):
+     # Treat the query as covered by the book if the nearest match is within the distance threshold.
+     return distances[0][0] <= threshold
+
+ def generate_diverse_responses(prompt, n=3):
+     # Sample n completions with slightly varied temperature/top_p settings.
+     responses = []
+     for i in range(n):
+         temperature = 0.7 + (i * 0.1)
+         top_p = 0.9 - (i * 0.1)
+         try:
+             chat_completion = client.chat.completions.create(
+                 messages=[{"role": "user", "content": prompt}],
+                 model="llama3-8b-8192",
+                 temperature=temperature,
+                 top_p=top_p
+             )
+             responses.append(chat_completion.choices[0].message.content.strip())
+         except Exception as e:
+             logging.error(f"Error generating response: {str(e)}")
+             responses.append("Error generating this response.")
+     return responses
+
+ def aggregate_responses(responses):
+     # Prefer an exact majority answer; otherwise return the response closest to the mean embedding of all candidates.
+     response_counter = Counter(responses)
+     most_common_response, count = response_counter.most_common(1)[0]
+     if count > 1:
+         return most_common_response
+     else:
+         embeddings = sentence_transformer_model.encode(responses)
+         avg_embedding = np.mean(embeddings, axis=0)
+         similarities = cosine_similarity([avg_embedding], embeddings)[0]
+         return responses[np.argmax(similarities)]
+
+ def generate_answer(query):
+     if query in cache:
+         logging.info(f"Cache hit for query: {query}")
+         return cache[query]
+
+     try:
+         query_embedding = generate_query_embedding(query)
+         D, I = vector_index.search(np.array(query_embedding), k=5)
+
+         if is_query_relevant(D):
+             relevant_items = [sentences_with_pages[i] for i in I[0]]
+             combined_text = " ".join([item['sentence'] for item in relevant_items])
+             page_numbers = sorted(set([item['page_number'] for item in relevant_items]))
+             page_numbers_str = ', '.join(map(str, page_numbers))
+
+             # Construct primary prompt
+             prompt = f"""
+             Use the following context from "The Book of Chatbots" to answer the question. If additional explanation is needed, provide an example.
+
+             **Context (Pages {page_numbers_str}):**
+             {combined_text}
+
+             **User's question:**
+             {query}
+
+             **Remember to indicate the specific page numbers.**
+             """
+             primary_responses = generate_diverse_responses(prompt)
+             primary_answer = aggregate_responses(primary_responses)
+
+             # Construct additional prompt for explanations
+             explanation_prompt = f"""
+             The user has a question about a complex topic. Please provide a clear explanation and a real-life example for better understanding.
+
+             **User's question:**
+             {query}
+
+             **Primary answer:**
+             {primary_answer}
+             """
+             explanation_responses = generate_diverse_responses(explanation_prompt)
+             explanation_answer = aggregate_responses(explanation_responses)
+
+             # Combine primary answer and explanation
+             full_response = f"{primary_answer}\n\n{explanation_answer}\n\n_From 'The Book of Chatbots,' pages {page_numbers_str}_"
+             cache[query] = full_response
+             logging.info(f"Generated response for query: {query}")
+             return full_response
+
+         else:
+             # General knowledge fallback
+             prompt = f"""
+             The user asked a question that is not covered in "The Book of Chatbots." Please provide a helpful answer using general knowledge.
+
+             **User's question:**
+             {query}
+             """
+             fallback_responses = generate_diverse_responses(prompt)
+             fallback_answer = aggregate_responses(fallback_responses)
+             cache[query] = fallback_answer
+             return fallback_answer
+
+     except Exception as e:
+         logging.error(f"Error generating answer: {str(e)}")
+         return "Sorry, an error occurred while generating the answer."
+
+ # --------------------- Gradio Interface ---------------------
+
+ def gradio_interface(user_query, history):
+     response = generate_answer(user_query)
+     history = history or []
+     history.append({"role": "user", "content": user_query})
+     history.append({"role": "assistant", "content": response})
+     return history, history
+
+ # Create the Gradio interface
+ with gr.Blocks(css=".gradio-container {background-color: #f0f0f0}") as iface:
+     gr.Markdown("""
+     # **The Book of Chatbots**
+     *Dive into the evolution of chatbots from ELIZA to ChatGPT with Chatbot Chronicles. Ask any question and explore the fascinating world of conversational AI as presented in Robert Ciesla's "The Book of Chatbots."*
+     """)
+
+     chatbot = gr.Chatbot(height=500, type='messages')
+     state = gr.State([])
+
+     with gr.Row():
+         txt = gr.Textbox(
+             show_label=False,
+             placeholder="Type your message here and press Enter",
+             container=False
+         )
+         submit_btn = gr.Button("Send")
+
+     def submit_message(user_query, history):
+         history = history or []
+         history.append({"role": "user", "content": user_query})
+         return "", history
+
+     def bot_response(history):
+         user_query = history[-1]['content']
+         response = generate_answer(user_query)
+         history.append({"role": "assistant", "content": response})
+         return history
+
+     txt.submit(submit_message, [txt, state], [txt, state], queue=False).then(
+         bot_response, state, chatbot
+     )
+     submit_btn.click(submit_message, [txt, state], [txt, state], queue=False).then(
+         bot_response, state, chatbot
+     )
+
+     reset_btn = gr.Button("Reset Chat")
+     reset_btn.click(lambda: ([], []), outputs=[chatbot, state], queue=False)
+
+ # Launch the Gradio app
+ if __name__ == "__main__":
+     iface.launch()