import gradio as gr
import os
import time
import threading
from langchain.document_loaders import OnlinePDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import ConversationalRetrievalChain

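# Read the OpenAI key from the environment; the secret here is expected to be named "Your_API_Key"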
os.environ['OPENAI_API_KEY'] = os.getenv("Your_API_Key")

# Globals: the retrieval chain (created when a PDF is loaded) and the last interaction time
qa = None
last_interaction_time = 0

def loading_pdf():
    return "Working on the upload..."

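# Load the uploaded PDF, split it into chunks, embed the chunks into a Chroma store,
# and build the conversational retrieval chain used to answer questions.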
def pdf_changes(pdf_doc):
    loader = OnlinePDFLoader(pdf_doc.name)
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    texts = text_splitter.split_documents(documents)
    embeddings = OpenAIEmbeddings()
    db = Chroma.from_documents(texts, embeddings)
    retriever = db.as_retriever()
    global qa
    # 16k-context chat model; cap the completion so the retrieved chunks still fit in context
    qa = ConversationalRetrievalChain.from_llm(
        llm=ChatOpenAI(temperature=0.2, model_name="gpt-3.5-turbo-16k", max_tokens=1000),
        retriever=retriever,
        return_source_documents=False)
    return "Ready"


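# Drop the retrieval chain so the indexed document is no longer available.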
def clear_data():
    global qa
    qa = None
    return "Data cleared"

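# Append the user's message to the chat history and reset the inactivity timer.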
def add_text(history, text):
    global last_interaction_time
    last_interaction_time = time.time()
    history = history + [(text, None)]
    return history, ""

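# Answer the latest user message and format the reply one sentence per line.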
def bot(history):
    response = infer(history[-1][0], history)
    # One sentence per line, using Markdown hard line breaks and keeping the periods
    formatted_response = "**Bot:**  \n" + '.  \n'.join(response.split('. '))
    history[-1][1] = formatted_response
    return history

def infer(question, history):
    if qa is None:
        return "Please load a PDF first."
    # Rebuild the (question, answer) pairs the chain expects, excluding the pending turn
    res = []
    for human, ai in history[:-1]:
        res.append((human, ai))
    
    chat_history = res
    query = question
    result = qa({"question": query, "chat_history": chat_history})
    return result["answer"]

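# Release the chain after 10 minutes without interaction to free memory.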
def auto_clear_data():
    global qa, last_interaction_time
    if time.time() - last_interaction_time > 600:
        qa = None

def periodic_clear():
    while True:
        auto_clear_data()
        time.sleep(60)

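# Run the inactivity check once a minute in a background daemon thread.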
threading.Thread(target=periodic_clear, daemon=True).start()

css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""

title = """
<div style="text-align: center;max-width: 700px;">
    <h1>CauseWriter Chat with PDF • OpenAI</h1>
    <p style="text-align: center;">Upload a PDF from your computer and click the "Convert PDF to Magic AI language" button. <br />
    Once the status shows "Ready", you can start asking questions about the PDF. <br />
    This version keeps the chat history and uses OpenAI as the LLM.</p>
</div>
"""

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        
        with gr.Column():
            pdf_doc = gr.File(label="Load a pdf", file_types=['.pdf'], type="file")
            with gr.Row():
                langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)
                load_pdf = gr.Button("Convert PDF to Magic AI language")
                clear_btn = gr.Button("Clear Data")
        
        chatbot = gr.Chatbot([], elem_id="chatbot").style(height=450)
        question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter")
        submit_btn = gr.Button("Send Message")

    load_pdf.click(loading_pdf, None, langchain_status, queue=False)
    load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False)
    clear_btn.click(clear_data, outputs=[langchain_status], queue=False)
    question.submit(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )
    submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )

demo.launch()