import gradio as gr
import openai
import os
import time
from gpt_index import GPTSimpleVectorIndex, SimpleDirectoryReader
from threading import Lock
from typing import List, Optional, Tuple
from azure.ai.formrecognizer import DocumentAnalysisClient
from azure.core.credentials import AzureKeyCredential
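# Azure Form Recognizer extracts text from the uploaded PDF; gpt_index builds a vector index over
# that text so the chatbot answers from the document instead of inventing facts.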
# API keys are read from the environment rather than hard-coded in source:
# OPENAI_API_KEY is picked up by gpt_index, AZURE_FORM_RECOGNIZER_KEY by the Form Recognizer client.
endpoint = "https://eastus.api.cognitive.microsoft.com/"
credential = AzureKeyCredential(os.environ["AZURE_FORM_RECOGNIZER_KEY"])
form_recognizer_client = DocumentAnalysisClient(endpoint=endpoint, credential=credential)
class ChatWrapper:
    """Serializes chat calls so concurrent sessions don't query the index at the same time."""

    def __init__(self):
        self.lock = Lock()

    def __call__(self, message, history: Optional[List[Tuple[str, str]]]):
        with self.lock:
            history = history or []
            try:
                # Reload the vector index built from the uploaded document and answer the question.
                index = GPTSimpleVectorIndex.load_from_disk('index.json')
                response = index.query(message, verbose=True)
                history.append((message, str(response)))
            except Exception as e:
                # Show the error in the chat window instead of returning a mismatched component.
                history.append((message, f"Error: {e}"))
        return history, history
def make_status_box_visible():
    # Show the status textbox and hide the upload button while the file is processed.
    return gr.update(visible=True), gr.update(visible=False)
def create_index():
    # Build a vector index over everything in ./data and persist it for the chat handler.
    documents = SimpleDirectoryReader('data').load_data()
    index = GPTSimpleVectorIndex(documents)
    index.save_to_disk('index.json')
def pdf_to_text(file_obj, progress=gr.Progress()):
    progress(0.2, desc="Uploading file...")
    with open(file_obj.name, "rb") as f:
        progress(0.5, desc="Analyzing file...")
        # Run Form Recognizer's prebuilt document model over the uploaded PDF.
        poller = form_recognizer_client.begin_analyze_document("prebuilt-document", f)
        progress(0.8, desc="Applying OCR...")
        result = poller.result()
    progress(0.9, desc="Azure OpenAI Magic...")
    # Save the extracted text so SimpleDirectoryReader can pick it up when building the index.
    os.makedirs("data", exist_ok=True)
    with open("data/text.txt", "w") as f:
        f.write(str(result.content))
    create_index()
    progress(1.0, desc="Done!")
    time.sleep(1.5)
    # Return the extracted text, reveal the chat interface, and hide the upload interface.
    return str(result.content), gr.update(visible=True), gr.update(visible=False)
chat = ChatWrapper()
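# Gradio UI: an upload step (PDF -> Form Recognizer OCR -> vector index) followed by a chat step
# that answers questions over the indexed text.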
with gr.Blocks(css="footer {visibility: hidden;}") as demo:
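    # Per-session state: chat_history_state holds the (message, response) history;
    # pdf_content is reserved for the extracted PDF text.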
    chat_history_state = gr.State()
    pdf_content = gr.State()
    gr.Markdown("""
<sub><sup>created by [@shamill](https://whoplus.microsoft.com/?_vwp=true&_vwpAlias=SHAMILL)</sup></sub>
# Customized GPT-3 Chatbot
GPT-3.5 is a powerful language model that can be used to create a chatbot that converses with you. This demo lets you customize the context of the conversation, and the chatbot will stick to the confines of the context you provide, avoiding made-up answers. The chatbot is powered by Azure's OpenAI GPT-3 API.""")
    # Chat interface: hidden until a context PDF has been uploaded and indexed.
    with gr.Column(visible=False) as chat_interface:
        with gr.Row():
            chatbot = gr.Chatbot()
        with gr.Row():
            message_box = gr.Textbox(lines=2, placeholder="Type a message...", value="Hi there!")
            submit_button = gr.Button("Submit").style(full_width=False)
        submit_button.click(chat, inputs=[message_box, chat_history_state], outputs=[chatbot, chat_history_state])
    # Upload interface: shown first, replaced by the chat interface once indexing succeeds.
    with gr.Column(visible=True) as upload_interface:
        with gr.Row():
            upload = gr.File(label="Upload a context pdf file", type="file")
        with gr.Row():
            button = gr.Button("Upload").style(full_width=False)
        with gr.Row():
            loadingbox = gr.Textbox("Status", visible=False)
        # The first click handler swaps the button for the status box; the second runs OCR and indexing.
        button.click(make_status_box_visible, outputs=[loadingbox, button])
        button.click(pdf_to_text, inputs=[upload], outputs=[loadingbox, chat_interface, upload_interface])
demo.queue(concurrency_count=20).launch(share=True)