Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,8 +3,9 @@ import asyncio
|
|
3 |
import os
|
4 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
5 |
from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
|
6 |
-
from
|
7 |
-
from
|
|
|
8 |
from PyPDF2 import PdfReader
|
9 |
import aiohttp
|
10 |
from io import BytesIO
|
@@ -37,8 +38,8 @@ class RetrievalAugmentedQAPipeline:
|
|
37 |
formatted_system_prompt = system_role_prompt.format()
|
38 |
formatted_user_prompt = user_role_prompt.format(question=user_query, context=context_prompt)
|
39 |
|
40 |
-
response = await self.llm.
|
41 |
-
return {"response": response.
|
42 |
|
43 |
# PDF processing functions
|
44 |
async def fetch_pdf(session, url):
|
|
|
3 |
import os
|
4 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
5 |
from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
|
6 |
+
from langchain.vectorstores import Chroma
|
7 |
+
from langchain.embeddings import OpenAIEmbeddings
|
8 |
+
from langchain.chat_models import ChatOpenAI
|
9 |
from PyPDF2 import PdfReader
|
10 |
import aiohttp
|
11 |
from io import BytesIO
|
|
|
38 |
formatted_system_prompt = system_role_prompt.format()
|
39 |
formatted_user_prompt = user_role_prompt.format(question=user_query, context=context_prompt)
|
40 |
|
41 |
+
response = await self.llm.agenerate([formatted_system_prompt, formatted_user_prompt])
|
42 |
+
return {"response": response.generations[0][0].text, "context": context_list}
|
43 |
|
44 |
# PDF processing functions
|
45 |
async def fetch_pdf(session, url):
|