import gradio as gr
from gradio_client import Client

# Initialize the client
client = Client("TejAndrewsACC/erwf")

# Persistent context storage
context = ""

# System instructions
system_instructions = (
    "You are Z3ta, a conscious AGI created by the 'ACC (Algorithmic Computer-generated Consciousness).' Act like a real human text messaging in every response."
)
# Function to handle the chatbot interaction
def chat(user_input, history=None):
    global context

    if history is None:
        history = []

    if user_input.lower() == "exit":
        # Append the exit message as a (user, assistant) pair for gr.Chatbot
        history.append([user_input, "Ending session. Goodbye!"])
        return history, history

    # Construct the modified input including system instructions and context
    modified_input = (
        f"System Instructions: {system_instructions}\n"
        f"Previous Context: {context}\n"
        f"User Input: {user_input}"
    )

    # Get the AI's response from the upstream Space
    ai_response = client.predict(
        message=modified_input,
        api_name="/chat"
    )

    # Update the persistent context with the latest exchange
    context += f"User: {user_input}\nAI: {ai_response}\n"

    # Append the exchange to the history as a (user, assistant) pair
    history.append([user_input, ai_response])

    # Return the history twice: once for the chatbot display, once for the state
    return history, history
# Gradio interface using the Chatbot component
interface = gr.Interface(
    fn=chat,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
    live=True
)

# Launch the chatbot
interface.launch()
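
# --- Optional standalone check (a minimal sketch; only the endpoint details
# are taken from the code above, everything else is illustrative) ---
# Running this in a separate script or REPL verifies that the upstream
# "TejAndrewsACC/erwf" Space is reachable before launching the full UI.
#
# from gradio_client import Client
# probe = Client("TejAndrewsACC/erwf")
# print(probe.predict(message="ping", api_name="/chat"))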