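# Gradio app: chat with an uploaded PDF. The document is split into chunks,
# embedded with HuggingFace embeddings, indexed in Chroma, and queried through
# a LangChain RetrievalQA chain using the user's OpenAI API key.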
import gradio as gr
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

# The embedding model and text splitter are created once and reused across requests.
embeddings = HuggingFaceEmbeddings()
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
def chat(openai_key, message, history, file):
    """Answer a question about the uploaded PDF and append the turn to the chat history."""
    history = history or []
    # Load the uploaded PDF and split it into chunks.
    loader = PyPDFLoader(file.name)
    documents = loader.load()
    texts = text_splitter.split_documents(documents)
    # Index the chunks in a Chroma vector store.
    vectordb = Chroma.from_documents(texts, embeddings)
    # Build a "stuff" RetrievalQA chain backed by gpt-3.5-turbo.
    qa = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo", openai_api_key=openai_key),
        chain_type="stuff",
        retriever=vectordb.as_retriever(),
    )
    answer = qa.run(message)
    history.append((message, answer))
    # Return the history twice: once for the Chatbot display, once for the State.
    return history, history
inputs = [
    gr.Textbox(lines=1, label="OpenAI API Key", placeholder="OpenAI API key here...", type="password"),
    gr.Textbox(lines=2, label="Prompt"),
    gr.State([]),
    gr.File(label="PDF File"),
]
outputs = [
    gr.Chatbot(label="Chatbot"),
    gr.State([]),
]

app = gr.Interface(
    fn=chat,
    inputs=inputs,
    outputs=outputs,
    allow_flagging="never",
)

if __name__ == "__main__":
    app.launch()