Vageesh1 committed
Commit 15b3a93 · 1 Parent(s): f0e3ac2

Update app.py

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -31,21 +31,22 @@ from langchain.vectorstores import FAISS
 from langchain.chains.question_answering import load_qa_chain
 from langchain.chains import RetrievalQA
 from langchain import HuggingFacePipeline
+from langchain.chains import ConversationalRetrievalChain
 
-from helper import pdf_loader,splitDoc,makeEmbeddings,create_flan_t5_base
+from helper import conversational_chat,pdf_loader,splitDoc,makeEmbeddings,create_flan_t5_base
 
+
 def conversational_chat(chain,query):
     result = chain({"question": query,
                     "chat_history": st.session_state['history']})
     st.session_state['history'].append((query, result["answer"]))
 
     return result["answer"]
-
 
 
 def ui():
     st.title('PDF Question Answer Bot')
-    # hugging_face_key = os.environ["HUGGINGFACE_HUB_TOKEN"]
+    hugging_face_key = os.environ["HUGGINGFACE_HUB_TOKEN"]
     llm = create_flan_t5_base(load_in_8bit=False)
     hf_llm = HuggingFacePipeline(pipeline=llm)
 
@@ -61,20 +62,20 @@ def ui():
     vector_database = makeEmbeddings(pdf_doc)
     #making the retriever of the vector database
     retriever = vector_database.as_retriever(search_kwargs={"k":4})
-    qa_chain = RetrievalQA.from_chain_type(llm=hf_llm, chain_type="stuff",retriever=retriever)
+    qa_chain = ConversationalRetrievalChain.from_llm(llm = hf_llm,
+                                                     retriever=vector_database.as_retriever())
 
     # Create an empty container to hold the PDF loader section
     pdf_loader_container = st.empty()
 
     # Check if the PDF file is uploaded or not
     if uploaded_file is not None:
-        print("The file has been uploaded successfully")
+        st.text("The file has been uploaded successfully")
         # Hide the PDF loader interface when the file is uploaded
         pdf_loader_container.empty()
         # Show the chat interface
         show_chat_interface(qa_chain)
 
-
 def show_chat_interface(qa_chain):
     if 'history' not in st.session_state:
         st.session_state['history'] = []