# Hugging Face Spaces app — the deployed Space was showing "Runtime error"
# (most likely from the broken langchain import paths fixed below).
import gradio as gr
from typing import List, Optional, Tuple

from langchain.chains import LLMChain  # was `langchain.llm` — no such module (ImportError)
from langchain.llms import HuggingFaceHub  # was `langchain.huggingface_hub` — no such module
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import PromptTemplate

# --- Language-model chain setup ---------------------------------------------
# LLMChain requires a PromptTemplate, not a bare string.  The template must
# expose the memory's variable ({chat_history}) and the runtime input
# ({human_input}); otherwise predict(human_input=...) fails prompt validation.
template = (
    "Instructions: You are SplitticAI. You answer questions exactly like "
    "people ask them. You were made by SplitticHost. You impersonate yourself "
    "as an AI chatbot.\n\n"
    "{chat_history}\n"
    "Human: {human_input}\n"
    "SplitticAI:"
)
prompt = PromptTemplate(
    input_variables=["chat_history", "human_input"],
    template=template,
)

# Near-zero temperature => effectively deterministic generations.
llm = HuggingFaceHub(
    repo_id="google/flan-t5-xxl",
    model_kwargs={"temperature": 1e-10},
)

llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    # Keep only the last k=2 exchanges in the prompt window; memory_key must
    # match the {chat_history} variable in the template above.
    memory=ConversationBufferWindowMemory(k=2, memory_key="chat_history"),
)
# Define the chat callback invoked by both the Send button and Enter key.
def chat(
    inp: str,
    history: Optional[list],
    chain: Optional[object],
):
    """Generate a reply for *inp* and append the exchange to *history*.

    Parameters
    ----------
    inp : the user's message.
    history : accumulated ``(user, bot)`` message pairs; ``None`` on the
        first call of a session.
    chain : unused Gradio state slot, kept for signature compatibility.
        (The original annotation named ``ConversationChain``, which was
        never imported and raised NameError at definition time.)

    Returns the updated history twice: once for the Chatbot widget and once
    for the session-state component.
    """
    history = history or []
    # NOTE(review): relies on the module-level `llm_chain`; the `chain`
    # argument is never read — confirm whether per-session chains were
    # intended.
    output = llm_chain.predict(human_input=inp)
    history.append((inp, output))
    return history, history
# --- Gradio interface --------------------------------------------------------
block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    with gr.Row():
        gr.Markdown("<h3><center>SplitticAI Chatbot</center></h3>")

    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="What would you like to ask me?",
            lines=1,
        )
        # NOTE(review): .style() was removed in Gradio 4 — pin gradio<4 in
        # requirements, or migrate to gr.Button(..., scale=0).
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

    gr.Examples(
        examples=[
            "What is artificial intelligence?",
            "How does SplitticAI work?",
            "Can you tell me a joke?",
        ],
        inputs=message,
    )
    gr.HTML("Ask SplitticAI anything and get an answer!")
    gr.HTML("<center>Powered by SplitticHost</center>")

    # Per-session state: chat history, plus an agent slot that the callback
    # currently ignores.
    state = gr.State()
    agent_state = gr.State()

    # Clicking Send and pressing Enter both route through the same callback.
    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
    message.submit(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])

block.launch(debug=True)