import gradio as gr
import os
import time
import threading
from langchain.document_loaders import OnlinePDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import OpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import ConversationalRetrievalChain
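
# OPENAI_API_KEY is filled from another environment variable; "Your_API_Key" is just a placeholder name.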
os.environ['OPENAI_API_KEY'] = os.getenv("Your_API_Key")
# Track the time of the last user interaction (plain module-level global, updated in add_text)
last_interaction_time = 0
def loading_pdf():
    return "Working the upload. Also, pondering the usefulness of sporks..."
def pdf_changes(pdf_doc):
    loader = OnlinePDFLoader(pdf_doc.name)
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=500, chunk_overlap=50)
    texts = text_splitter.split_documents(documents)
    embeddings = OpenAIEmbeddings()
    db = Chroma.from_documents(texts, embeddings)
    retriever = db.as_retriever()
    global qa
    qa = ConversationalRetrievalChain.from_llm(
        llm=OpenAI(temperature=0.5),
        retriever=retriever,
        return_source_documents=False
    )
    return "Ready"
def clear_data():
    global qa
    qa = None
    return "Data cleared"
def add_text(history, text):
    global last_interaction_time
    last_interaction_time = time.time()
    history = history + [(text, None)]
    return history, ""
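
# Generate the answer and stream it back character by character for a typing effect.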
def bot(history):
    response = infer(history[-1][0], history)
    formatted_response = "**Bot:** \n" + ' \n'.join(response.split('. '))
    history[-1][1] = ""
    for character in formatted_response:
        history[-1][1] += character
        time.sleep(0.05)
        yield history
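
# Query the ConversationalRetrievalChain, passing earlier turns as chat history.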
def infer(question, history):
    res = []
    for human, ai in history[:-1]:
        pair = (human, ai)
        res.append(pair)
    chat_history = res
    query = question
    result = qa({"question": query, "chat_history": chat_history})
    return result["answer"]
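
# Background housekeeping: release the loaded index after 10 minutes of inactivity.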
def auto_clear_data():
    global qa, last_interaction_time
    if time.time() - last_interaction_time > 600:  # 600 seconds = 10 minutes
        qa = None
def periodic_clear():
    while True:
        auto_clear_data()
        time.sleep(60)  # Check every minute

threading.Thread(target=periodic_clear, daemon=True).start()  # daemon thread so it doesn't block shutdown
css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
title = """
<div style="text-align: center;max-width: 700px;">
<h1>CauseWriter Chat with PDF • OpenAI</h1>
<p style="text-align: center;">Upload a .PDF from your computer, click the "Convert PDF to Magic AI language" button, <br />
and once everything is ready you can start asking questions about the PDF. <br />
This version stores chat history and uses OpenAI as the LLM.</p>
</div>
"""
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        with gr.Column():
            pdf_doc = gr.File(label="Load a pdf", file_types=['.pdf'], type="file")
            with gr.Row():
                langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)
                load_pdf = gr.Button("Convert PDF to Magic AI language")
                clear_btn = gr.Button("Clear Data")
        chatbot = gr.Chatbot([], elem_id="chatbot").style(height=350)
        question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter")
        submit_btn = gr.Button("Send Message")

    load_pdf.click(loading_pdf, None, langchain_status, queue=False)
    load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False)
    clear_btn.click(clear_data, outputs=[langchain_status], queue=False)
    question.submit(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )
    submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )
demo.launch()