saritha committed on
Commit c12a4ac · verified · 1 Parent(s): 8cd504a

Update app.py

Files changed (1)
  1. app.py +6 -8
app.py CHANGED
@@ -13,9 +13,6 @@ async def initialize(file_path, question):
     genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
     model = genai.GenerativeModel('gemini-pro')
     model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
-    # genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
-    # model = genai.GenerativeModel('gemini-pro')
-    # model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
     prompt_template = """Answer the question as precise as possible using the provided context. If the answer is
     not contained in the context, say "answer not available in context" \n\n
     Context: \n {context}?\n
@@ -28,8 +25,9 @@ async def initialize(file_path, question):
     pages = pdf_loader.load_and_split()
     context = "\n".join(f"Page {i+1}: {page.page_content}" for i, page in enumerate(pages[:30]))
     stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
-    # Refactor the below line to make sure it returns an awaitable object
-    stuff_answer = await stuff_chain.arun({"input_documents": pages, "question": question, "context": context})
+
+    # Use ainvoke instead of arun
+    stuff_answer = await stuff_chain.ainvoke({"input_documents": pages, "question": question, "context": context})
 
     # Extract the page number where the context was found
     sources = []
@@ -45,7 +43,7 @@ async def initialize(file_path, question):
         # Add the clickable link to the source
         file_name = os.path.basename(file_path)
         source_link = f"[{file_name}](file://{os.path.abspath(file_path)})"
-        return f"{stuff_answer['output_text']} {source_str} - [Document: {source_link}]"
+        return f"{stuff_answer} {source_str} - [Document: {source_link}]"
     else:
         return "Error: Unable to process the document. Please ensure the PDF file is valid."
 
@@ -59,5 +57,5 @@ async def pdf_qa(file, question):
     answer = await initialize(file.name, question)
     return answer
 
-# Create Gradio Interface
-gr.Interface(fn=pdf_qa, inputs=[input_file, input_question], outputs=output_text, title="PDF Question Answering System", description="Upload a PDF file and ask questions about the content.").launch()
+# Create Gradio Interface with share=True to enable a public link
+gr.Interface(fn=pdf_qa, inputs=[input_file, input_question], outputs=output_text, title="PDF Question Answering System", description="Upload a PDF file and ask questions about the content.").launch(share=True)
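
The switch from arun to ainvoke matches LangChain's current async chain interface, where arun is deprecated. Below is a minimal sketch of the same call pattern, separate from this commit: it assumes GOOGLE_API_KEY is set and the langchain and langchain-google-genai packages used by app.py are installed; the demo coroutine and the sample Document are illustrative stand-ins, not code from this repository.

import asyncio
import os

from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain.schema import Document
from langchain_google_genai import ChatGoogleGenerativeAI

async def demo():
    # Same model and stuff-chain setup as app.py (hypothetical prompt text)
    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3,
                                   google_api_key=os.getenv("GOOGLE_API_KEY"))
    prompt = PromptTemplate(
        template="Context:\n{context}\n\nQuestion:\n{question}\n\nAnswer:",
        input_variables=["context", "question"],
    )
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)

    # Stand-in for the pages returned by the PDF loader's load_and_split()
    docs = [Document(page_content="Page 1: ainvoke is the async entry point for chains.")]

    # ainvoke is awaitable and returns a dict of the chain's outputs,
    # whereas arun returned a plain string and is deprecated
    result = await chain.ainvoke({"input_documents": docs,
                                  "question": "Which method replaces arun?"})
    print(result)

asyncio.run(demo())

On the Gradio side, launch(share=True) keeps the local server and additionally requests a temporary public gradio.live URL, which is what the updated comment in the diff refers to.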