Technocoloredgeek committed on
Commit 6f82650 · verified · 1 Parent(s): 2d41cae

Update app.py

Files changed (1)
  1. app.py +1 -8
app.py CHANGED
@@ -41,7 +41,7 @@ class RetrievalAugmentedQAPipeline:
         messages = chat_prompt.format_prompt(context=context_prompt, question=user_query).to_messages()
 
         response = await self.llm.agenerate([messages])
-        return {"response": response.generations[0][0].text, "context": context_list}
+        return {"response": response.generations[0][0].text}
 
 # PDF processing functions
 async def fetch_pdf(session, url):
@@ -49,7 +49,6 @@ async def fetch_pdf(session, url):
         if response.status == 200:
             return await response.read()
         else:
-            st.error(f"Failed to fetch PDF from {url}")
             return None
 
 async def process_pdf(pdf_content):
@@ -78,8 +77,6 @@ async def main():
         chunks = await process_pdf(pdf_content)
         all_chunks.extend(chunks)
 
-    st.write(f"Created {len(all_chunks)} chunks from {len(pdf_urls)} PDF files")
-
     embeddings = OpenAIEmbeddings()
     vector_db = Chroma.from_texts(all_chunks, embeddings)
 
@@ -99,7 +96,3 @@ if user_query:
 
     st.write("Response:")
     st.write(result["response"])
-
-    st.write("Context used:")
-    for i, context in enumerate(result["context"], 1):
-        st.write(f"{i}. {context[:100]}...")
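This commit narrows the pipeline's return value to a single "response" key, so any caller that previously read result["context"] must now rely on the generated text alone. A minimal sketch of the consuming side, assuming an async entry point on the pipeline (the method name run_pipeline is hypothetical; only the return shape is taken from this diff):

# Hypothetical caller, mirroring the post-commit return shape of
# RetrievalAugmentedQAPipeline.
async def answer(pipeline, user_query: str) -> str:
    # run_pipeline is an assumed entry-point name; the diff only shows
    # that the awaited call resolves to a dict with a "response" key.
    result = await pipeline.run_pipeline(user_query)
    return result["response"]  # result["context"] no longer exists after this commit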