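"""Panel app: question answering over an uploaded PDF using LangChain, FAISS, and OpenAI."""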
import os
import tempfile

import panel as pn
from langchain.chains import ConversationalRetrievalChain, RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader, TextLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores.faiss import FAISS

# The TextEditor widget requires the 'texteditor' extension to be loaded.
pn.extension("texteditor")
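# UI widgets: PDF upload, API key input, question editor, run button,
# and advanced retrieval settings (chain type and number of chunks).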
file_input = pn.widgets.FileInput(width=300)

openaikey = pn.widgets.PasswordInput(
    value="", placeholder="Enter your OpenAI API Key here...", width=300
)
prompt = pn.widgets.TextEditor(
    value="", placeholder="Enter your questions here...", height=160, toolbar=False
)
run_button = pn.widgets.Button(name="Run!")

select_k = pn.widgets.IntSlider(
    name="Number of relevant chunks", start=1, end=5, step=1, value=2
)
select_chain_type = pn.widgets.RadioButtonGroup(
    name="Chain type",
    options=["stuff", "map_reduce", "refine", "map_rerank"],
)

widgets = pn.Row(
    pn.Column(prompt, run_button, margin=5),
    pn.Card(
        "Chain type:",
        pn.Column(select_chain_type, select_k),
        title="Advanced settings",
        margin=10,
    ),
    width=600,
)
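# Core QA pipeline: load the PDF, split it into chunks, embed and index the
# chunks with FAISS, then answer the query with a RetrievalQA chain over the
# k most similar chunks.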
def qa(file, query, chain_type, k):
    # Load the PDF and split it into chunks for retrieval
    loader = PyPDFLoader(file)
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    texts = text_splitter.split_documents(documents)
    # Embed the chunks and index them in a FAISS vector store
    embeddings = OpenAIEmbeddings()
    vector_store = FAISS.from_documents(texts, embeddings)
    retriever = vector_store.as_retriever(
        search_type="similarity", search_kwargs={"k": k}
    )
    model = ChatOpenAI(model="gpt-3.5-turbo")
    # Alternative: a conversational chain that also tracks chat history
    # qa_chain = ConversationalRetrievalChain.from_llm(
    #     model, retriever=retriever, chain_type=chain_type)
    qa_chain = RetrievalQA.from_chain_type(
        model, chain_type=chain_type, retriever=retriever, return_source_documents=False
    )
    result = qa_chain({"query": query})
    return result["result"]
convos = []  # store all panel objects in a list
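# FileInput keeps the uploaded PDF only as bytes in memory, while PyPDFLoader
# needs a path on disk, so the upload is first written to a temporary file.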
def tempfile_create(file_input):
    # delete=False keeps the file on disk after the context manager closes it,
    # so its path stays valid for PyPDFLoader
    with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as temp_file:
        temp_file.write(file_input.value)
    return temp_file.name
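# Callback bound to the Run button: sets the API key, runs the QA pipeline on
# the uploaded PDF, and appends the question/answer pair to the conversation.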
def qa_result(_):
    os.environ["OPENAI_API_KEY"] = openaikey.value
    if file_input.value is not None:
        pdf_file = tempfile_create(file_input)
        prompt_text = prompt.value
        if prompt_text:
            result = qa(
                file=pdf_file,
                query=prompt_text,
                chain_type=select_chain_type.value,
                k=select_k.value,
            )
            # Append the question and the answer as chat-style rows
            convos.extend([
                pn.Row(
                    pn.panel("\U0001F60A", width=10),
                    prompt_text,
                    width=600,
                ),
                pn.Row(
                    pn.panel("\U0001F916", width=10),
                    result,
                    width=600,
                ),
            ])
    return pn.Column(*convos, margin=15, width=575, min_height=400)
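# pn.bind re-runs qa_result whenever the Run button is clicked; wrapping the
# bound function in pn.panel with loading_indicator=True shows a spinner while
# the chain is running.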
qa_interactive = pn.panel(
    pn.bind(qa_result, run_button),
    loading_indicator=True,
)

output = pn.WidgetBox(
    "*Output will show up here:*",
    qa_interactive,
    width=630,
    scroll=True,
)
# Layout
pn.Column(
    pn.pane.Markdown("""
## \U0001F60A! Question Answering with your PDF file

Step 1: Upload a PDF file.

Step 2: Enter your OpenAI API key. This costs $$. You will need to set up billing info at [OpenAI](https://platform.openai.com/account).

Step 3: Type your question at the bottom and click "Run!".
"""),
    pn.Row(file_input, openaikey),
    output,
    widgets,
).servable()
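# To run locally (assuming this file is saved as app.py):
#   panel serve app.py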