import os
from typing import Optional, Tuple
import gradio as gr
import argparse
import datetime
import pickle
#import whisper
import dotenv
import sys
from io import StringIO
import re
dotenv.load_dotenv()
from langchain.callbacks import get_openai_callback
import hydra
from omegaconf import DictConfig, open_dict, OmegaConf
class ChatbotAgentGradio():
    def __init__(
        self,
        config_name
    ):
        # Load the OmegaConf/Hydra config and instantiate the underlying chatbot
        # from its `model` section.
        config = OmegaConf.load(f'./config/{config_name}.yaml')
        self.chatbot = hydra.utils.instantiate(config.model, _convert_="partial")
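        # A minimal sketch of what ./config/<config_name>.yaml is assumed to look like
        # for hydra.utils.instantiate(config.model, ...) to resolve. Illustrative only;
        # the target class path and fields below are hypothetical:
        #
        #   model:
        #     _target_: chatbot.agent.ChatbotAgent
        #     temperature: 0.0
        #     verbose: true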
    def chat(self,
             inp: str,
             history: Optional[Tuple[str, str]],
             ):
        """Method for integration with gradio Chatbot"""
        print("\n==== date/time: " + str(datetime.datetime.now()) + " ====")
        print("inp: " + inp)
        history = history or []
        output = self.chatbot.run(inp)
        # gr.Chatbot expects a list of (user, bot) message pairs; the same list is
        # returned twice so it updates both the Chatbot display and the state.
        history.append((inp, output))
        return history, history#, ""
    def update_foo(self, widget, state):
        # Helper that copies a widget's value into a gr.State object.
        if widget:
            state = widget
        return state
    def launch_app(self):
        block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

        with block:
            instance = gr.State()
            show_chain_state = gr.State(False)

            with gr.Row():
                gr.Markdown("<h3><center>UNHCR</center></h3>")

            with gr.Row():
                chatbot = gr.Chatbot()

            with gr.Row():
                message = gr.Textbox(
                    label="What's your question?",
                    lines=1,
                )
                submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

            state = gr.State()
            agent_state = gr.State()

            submit.click(self.chat, inputs=[message, state], outputs=[chatbot, state])
            message.submit(self.chat, inputs=[message, state], outputs=[chatbot, state])

        block.launch()#, server_name='192.168.0.73', )
if __name__ == "__main__":
    app = ChatbotAgentGradio('conf_0.1')
    app.launch_app()
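# Usage sketch (filename hypothetical): run from the repo root so the relative
# ./config/conf_0.1.yaml path resolves, e.g.:
#   python chatbot_gradio.py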