saritha committed on
Commit 6f97d1c · verified · 1 Parent(s): 34be9dd

Update app.py

Files changed (1)
  1. app.py +41 -7
app.py CHANGED
@@ -1,13 +1,16 @@
 import os
 import gradio as gr
 import asyncio
+from datetime import datetime
 from langchain_core.prompts import PromptTemplate
-from langchain_community.output_parsers.rail_parser import GuardrailsOutputParser
 from langchain_community.document_loaders import PyPDFLoader
 from langchain_google_genai import ChatGoogleGenerativeAI
 import google.generativeai as genai
 from langchain.chains.question_answering import load_qa_chain # Import load_qa_chain
 
+# Initialize an empty list to store chat history
+chat_history = []
+
 async def initialize(file_path, question):
     genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
     model = genai.GenerativeModel('gemini-pro')
@@ -58,7 +61,6 @@ async def initialize(file_path, question):
 
         # Generate links for each top page
         file_name = os.path.basename(file_path)
-        # Use a general link format with instructions for manual navigation if automatic links are not supported
         page_links = [f"[Page {p}](file://{os.path.abspath(file_path)})" for p in top_pages]
         page_links_str = ', '.join(page_links)
 
@@ -70,14 +72,28 @@
         # Create a clickable link for the document
         source_link = f"[Document: {file_name}](file://{os.path.abspath(file_path)})"
 
+        # Save interaction to chat history
+        timestamp = datetime.now().isoformat()
+        chat_history.append({
+            'timestamp': timestamp,
+            'question': question,
+            'answer': answer,
+            'source': source_str,
+            'document_link': source_link
+        })
+
         return f"Answer: {answer}\n{source_str}\n{source_link}"
     else:
         return "Error: Unable to process the document. Please ensure the PDF file is valid."
 
-# Define Gradio Interface
+# Define Gradio Interface for QA and Chat History
 input_file = gr.File(label="Upload PDF File")
 input_question = gr.Textbox(label="Ask about the document")
-output_text = gr.Textbox(label="Answer and Top Pages")
+output_text = gr.Textbox(label="Answer and Top Pages", lines=10, max_lines=10)
+
+def get_chat_history():
+    history_str = "\n".join([f"Q: {entry['question']}\nA: {entry['answer']}\n{entry['source']}\n{entry['document_link']}\nTimestamp: {entry['timestamp']}\n" for entry in chat_history])
+    return history_str
 
 async def pdf_qa(file, question):
     if file is None:
@@ -86,6 +102,24 @@ async def pdf_qa(file, question):
     answer = await initialize(file.name, question)
     return answer
 
-# Create Gradio Interface with share=True to enable a public link
-gr.Interface(fn=pdf_qa, inputs=[input_file, input_question], outputs=output_text, title="PDF Question Answering System", description="Upload a PDF file and ask questions about the content.").launch(share=True)
-the content.").launch(share=True)
+# Create Gradio Interfaces
+qa_interface = gr.Interface(
+    fn=pdf_qa,
+    inputs=[input_file, input_question],
+    outputs=output_text,
+    title="PDF Question Answering System",
+    description="Upload a PDF file and ask questions about the content."
+)
+
+history_interface = gr.Interface(
+    fn=get_chat_history,
+    inputs=[],
+    outputs=gr.Textbox(label="Chat History", lines=20, max_lines=20),
+    title="Chat History",
+    description="View the history of interactions."
+)
+
+# Launch both interfaces
+qa_interface.launch(share=True)
+history_interface.launch(share=True)
+
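
A note on the new launch sequence: when app.py runs as a plain script, qa_interface.launch(share=True) typically blocks until that server is shut down, so history_interface.launch(share=True) may never execute. A minimal sketch of one alternative, assuming the qa_interface and history_interface objects defined above, is to serve both UIs from a single launch via gr.TabbedInterface:

import gradio as gr  # already imported in app.py

# Sketch only, not part of this commit: expose both interfaces as tabs
# so a single blocking launch() serves them. Tab names are illustrative.
demo = gr.TabbedInterface(
    [qa_interface, history_interface],
    tab_names=["PDF QA", "Chat History"],
    title="PDF Question Answering System",
)
demo.launch(share=True)

Either way, the module-level chat_history list is shared by every visitor to the running app, so the history view shows all interactions rather than only the current user's.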