import gradio as gr
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

# The embedding model (sentence-transformers default) and the text splitter are
# created once and reused; only the vector store is rebuilt per uploaded PDF.
embeddings = HuggingFaceEmbeddings()
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)


def chat(openai_key, message, history, file):
    """Answer `message` against the uploaded PDF and append the turn to `history`."""
    history = history or []

    # gr.File passes a temp-file object; its .name attribute is the path on disk.
    loader = PyPDFLoader(file.name)
    documents = loader.load()

    # Split the pages into ~1000-character chunks and index them in an
    # in-memory Chroma vector store.
    texts = text_splitter.split_documents(documents)
    vectordb = Chroma.from_documents(texts, embeddings)

    # "stuff" chain type: all retrieved chunks are stuffed into a single prompt.
    qa = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo", openai_api_key=openai_key),
        chain_type="stuff",
        retriever=vectordb.as_retriever(),
    )
    answer = qa.run(message)

    history.append((message, answer))

    # Return the history twice: once for the Chatbot display, once for gr.State.
    return history, history


inputs = [
    gr.Textbox(lines=1, label="OpenAI API Key", placeholder="openai api key here...", type="password"),
    gr.Textbox(lines=2, label="Prompt"),
    gr.State([]),
    gr.File(label="PDF File"),
]

outputs = [
    gr.Chatbot(label="Chatbot"),
    gr.State([]),
]

app = gr.Interface(
    fn=chat,
    inputs=inputs,
    outputs=outputs,
    allow_flagging="never",
)

if __name__ == "__main__":
    app.launch()
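
# Note on dependencies (an assumption, not part of the original file): the imports
# above use the legacy pre-0.1 LangChain package layout, so a matching environment
# would look roughly like:
#   pip install gradio "langchain<0.1" openai chromadb pypdf sentence-transformers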