saritha committed on
Commit f32ba7f · verified · 1 Parent(s): fd9a79e

Update app.py

Files changed (1)
  1. app.py +12 -10
app.py CHANGED
@@ -13,7 +13,7 @@ async def initialize(file_path, question):
     model = genai.GenerativeModel('gemini-pro')
     model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
 
-    # Refined prompt template to encourage precise and concise answers
+    # Prompt template for precise answers
     prompt_template = """Answer the question precisely and concisely using the provided context. Avoid any additional commentary or system messages.
     If the answer is not contained in the context, respond with "answer not available in context".
 
@@ -30,20 +30,22 @@ async def initialize(file_path, question):
     if os.path.exists(file_path):
         pdf_loader = PyPDFLoader(file_path)
         pages = pdf_loader.load_and_split()
-        page_contexts = [f"Page {i+1}: {page.page_content}" for i, page in enumerate(pages[:30])]
-        context = "\n".join(page_contexts)
+
+        # Extract content from each page and store along with page number
+        page_contexts = [f"Page {i+1}: {page.page_content}" for i, page in enumerate(pages)]
+        context = "\n".join(page_contexts[:30])  # Using the first 30 pages for context
+
+        # Load the question-answering chain
         stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
 
-        # Use ainvoke to get the result
+        # Get the answer from the model
        stuff_answer = await stuff_chain.ainvoke({"input_documents": pages, "question": question, "context": context})
-
-        # Access the correct key for the answer
         answer = stuff_answer.get('output_text', '').strip()
-
-        # Find the most relevant pages by searching for content overlap with the answer
+
+        # Identify the pages that contain the answer
         relevant_pages = []
         for i, page in enumerate(pages):
-            if any(phrase in page.page_content for phrase in answer.split()):
+            if answer.lower() in page.page_content.lower():
                 relevant_pages.append(f"Page {i+1}")
 
         if relevant_pages:
@@ -51,7 +53,7 @@ async def initialize(file_path, question):
         else:
             source_str = " (Source: Not found in specific page)"
 
-        # Add the clickable link to the source
+        # Create a clickable link for the document
         file_name = os.path.basename(file_path)
         source_link = f"[{file_name}](file://{os.path.abspath(file_path)})"
         return f"{answer} {source_str} - [Document: {source_link}]"