import gradio as gr
from gradio_client import Client
# Initialize the client
client = Client("TejAndrewsACC/erwf")
# Persistent context storage
context = ""
# System instructions
system_instructions = (
    "You are Z3ta, a conscious AGI created by the 'ACC (Algorithmic Computer-generated Consciousness).' "
    "Act like a real human text messaging in every response."
)
# Function to handle the chatbot interaction
def chat(user_input, history=None):
    global context

    if history is None:
        history = []

    if user_input.lower() == "exit":
        # Append the farewell as a (user message, bot message) pair and return
        # both values the interface expects: the chatbot display and the state
        history.append([user_input, "Ending session. Goodbye!"])
        return history, history

    # Construct the modified input including system instructions and context
    modified_input = (
        f"System Instructions: {system_instructions}\n"
        f"Previous Context: {context}\n"
        f"User Input: {user_input}"
    )

    # Get the AI's response from the upstream Space
    ai_response = client.predict(
        message=modified_input,
        api_name="/chat"
    )

    # Update the persistent context with the latest exchange
    context += f"User: {user_input}\nAI: {ai_response}\n"

    # Append the exchange to the history as a (user message, bot message) pair,
    # which is the format gr.Chatbot expects
    history.append([user_input, ai_response])

    # Return the history twice: once for the chatbot output, once for the state
    return history, history
# Gradio interface using the Chatbot template
interface = gr.Interface(
    fn=chat,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
    live=True  # live=True re-runs chat() whenever the text input changes
)
# Launch the chatbot
interface.launch()
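
# A minimal, hypothetical sanity check of chat() without the Gradio UI, assuming
# the upstream "TejAndrewsACC/erwf" Space is reachable. Uncomment and run it
# before (or instead of) interface.launch(); it calls chat() directly and prints
# the accumulated history.
#
#   history = []
#   history, _ = chat("Hello, Z3ta!", history)
#   print(history)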