Update app.py
Browse files
app.py
CHANGED
@@ -28,9 +28,22 @@ class AdvancedPdfChatbot:
|
|
28 |
os.environ["OPENAI_API_KEY"] = openai_api_key
|
29 |
self.embeddings = OpenAIEmbeddings()
|
30 |
self.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
|
31 |
-
self.llm = OpenAIChat(temperature=0,model_name='gpt-3.5-turbo'
|
|
|
32 |
self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
33 |
self.qa_chain = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
34 |
|
35 |
def load_and_process_pdf(self, pdf_path):
|
36 |
loader = PyPDFLoader(pdf_path)
|
@@ -44,6 +57,7 @@ class AdvancedPdfChatbot:
|
|
44 |
self.llm,
|
45 |
retriever=self.db.as_retriever(),
|
46 |
memory=self.memory
|
|
|
47 |
)
|
48 |
|
49 |
def chat(self, query):
|
|
|
28 |
os.environ["OPENAI_API_KEY"] = openai_api_key
|
29 |
self.embeddings = OpenAIEmbeddings()
|
30 |
self.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
|
31 |
+
self.llm = OpenAIChat(temperature=0,model_name='gpt-3.5-turbo')
|
32 |
+
|
33 |
self.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
34 |
self.qa_chain = None
|
35 |
+
self.template = """
|
36 |
+
You are a study partner assistant, students give you pdfs
|
37 |
+
and you help them to answer their questions.
|
38 |
+
|
39 |
+
Answer the question based on the most recent provided resources only.
|
40 |
+
Give the most relevant answer.
|
41 |
+
|
42 |
+
Context: {context}
|
43 |
+
Question: {question}
|
44 |
+
Answer:
|
45 |
+
"""
|
46 |
+
self.prompt = PromptTemplate(template=self.template, input_variables=["context", "question"])
|
47 |
|
48 |
def load_and_process_pdf(self, pdf_path):
|
49 |
loader = PyPDFLoader(pdf_path)
|
|
|
57 |
self.llm,
|
58 |
retriever=self.db.as_retriever(),
|
59 |
memory=self.memory,
|
60 |
+
combine_docs_chain_kwargs={"prompt": self.prompt}
|
61 |
)
|
62 |
|
63 |
def chat(self, query):
|