Update app.py
app.py CHANGED
@@ -15,6 +15,7 @@ from langchain.vectorstores import Chroma
 from langchain.chains import RetrievalQA
 
 from langchain.memory import ConversationBufferMemory
+from langchain.chains import ConversationChain
 
 def loading_pdf():
     return "Loading..."
@@ -26,7 +27,16 @@ def pdf_changes(pdf_doc):
     db = Chroma.from_documents(texts, embeddings)
     retriever = db.as_retriever()
     global qa
-    qa = RetrievalQA.from_chain_type(
+    qa = RetrievalQA.from_chain_type(
+        llm=OpenAI(temperature=0.5),
+        chain_type="stuff",
+        conversation = ConversationChain(
+            llm=llm,
+            verbose=True,
+            memory=ConversationBufferMemory()
+        ),
+        retriever=retriever,
+        return_source_documents=True)
     return "Ready"
 
 def add_text(history, text):
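As committed, the added call would not run: nothing in the diff defines llm at the point of llm=llm, and conversation is not an argument that RetrievalQA.from_chain_type accepts, so the nested ConversationChain never actually attaches the memory to the retrieval chain. Below is a minimal sketch of what the change appears to be aiming for (retrieval QA that remembers the conversation), using LangChain's ConversationalRetrievalChain instead of RetrievalQA plus ConversationChain; build_qa is a hypothetical helper, and the temperature and return_source_documents=True simply mirror the values in the commit.

# Sketch only, not part of the commit: ConversationalRetrievalChain combines a
# retriever with chat memory, which is what the ConversationChain block above
# appears intended to do.
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

def build_qa(retriever):
    # output_key="answer" tells the memory which chain output to store; it is
    # needed once return_source_documents=True adds a second output key.
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
        output_key="answer",
    )
    return ConversationalRetrievalChain.from_llm(
        llm=OpenAI(temperature=0.5),
        retriever=retriever,
        memory=memory,
        return_source_documents=True,
    )

Called as qa({"question": text}), the chain returns an "answer" key plus "source_documents", with the running chat history kept in the buffer memory between turns.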