surabhic committed
Commit c8926e6 · verified · 1 Parent(s): 7e72703

Update app.py

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -1,5 +1,5 @@
 import os
-from langchain_community.llms import HuggingFaceHub
+from langchain_huggingface import HuggingFaceEndpoint
 from langchain.chains.question_answering import load_qa_chain
 from langchain.vectorstores import FAISS
 from langchain.embeddings import HuggingFaceEmbeddings
@@ -7,7 +7,7 @@ from langchain.text_splitter import CharacterTextSplitter
 from langchain_community.document_loaders import PyPDFLoader, TextLoader
 import gradio as gr
 
-# Load documents (example using PDFs or TXT)
+# Load documents (PDF or TXT)
 def load_docs(file):
     if file.name.endswith(".pdf"):
         loader = PyPDFLoader(file.name)
@@ -26,15 +26,16 @@ def embed_docs(splits):
     embeddings = HuggingFaceEmbeddings()
     return FAISS.from_documents(splits, embeddings)
 
-# QA chain
+# QA chain using updated HuggingFaceEndpoint and invoke method
 def qa_chain(vectorstore, query):
-    llm = HuggingFaceHub(
+    llm = HuggingFaceEndpoint(
         repo_id="google/flan-t5-large",
-        model_kwargs={"temperature": 0.5, "max_length": 512}
+        huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
+        model_kwargs={"temperature": 0.5, "max_new_tokens": 512}
     )
     chain = load_qa_chain(llm, chain_type="stuff")
     docs = vectorstore.similarity_search(query)
-    return chain.run(input_documents=docs, question=query)
+    return chain.invoke({"input_documents": docs, "question": query})
 
 # Gradio interface
 def analyze(file, question):
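
One practical difference in this commit: the old chain.run(input_documents=docs, question=query) returned the answer as a plain string, while chain.invoke({...}) on a "stuff" QA chain returns a dict whose generated text sits under the "output_text" key. The analyze function is outside the hunks shown here, so the following is only a minimal sketch of how a caller might unpack the new return shape; the split_docs helper name is an assumption standing in for whatever CharacterTextSplitter wiring app.py actually uses.

def analyze(file, question):
    # split_docs is a hypothetical helper; app.py's real splitting step is not shown in this diff
    splits = split_docs(load_docs(file))
    vectorstore = embed_docs(splits)
    result = qa_chain(vectorstore, question)
    # invoke() returns a dict for a "stuff" chain; the answer is under "output_text",
    # so unpack it to keep the plain-string behavior that chain.run used to give Gradio
    return result["output_text"] if isinstance(result, dict) else result

If the Gradio interface expects a string, unpacking "output_text" this way keeps the post-commit behavior consistent with what chain.run produced before.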