# NOTE(review): removed Hugging Face Spaces page residue ("Spaces:" /
# "Runtime error") that was accidentally captured above the module header.
# =============================================================================
# DON'T STEAL THE FREE CODE OF DEVS! Use it for free and do not touch credits!
# If you steal this code, in the future you will pay for apps like this!
# A bit of respect goes a long way – all rights reserved under German law.
# Copyright Volkan Kücükbudak https://github.com/volkansah
# Repo URL: https://github.com/AiCodeCraft
# =============================================================================
import streamlit as st
import os
import json
import datetime
import openai
from datetime import timedelta
import logging
from datasets import load_dataset, Dataset, concatenate_datasets

# ------------------ Configure logging ------------------
logging.basicConfig(
    level=logging.INFO,  # emit INFO and above
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
logger.info("Starte App mit HF-Dataset Memory...")

# ------------------ Load Hugging Face token ------------------
# Read the token from the environment; an empty/whitespace value counts as unset.
HF_TOKEN_MEMORY = os.getenv('HF_TOKEN_MEMORY', '').strip()
if HF_TOKEN_MEMORY:
    logger.info("Hugging Face Token gefunden.")
else:
    logger.warning("Kein Hugging Face Token gefunden. Falls benötigt, bitte setzen!")

# ------------------ Settings for the memory dataset ------------------
# HF Hub repo that stores past (user_id, query, response) interactions.
DATASET_REPO = "AiCodeCarft/customer_memory"
def load_memory_dataset():
    """
    Load the memory dataset from the Hugging Face Hub.

    If the repo cannot be loaded (e.g. it does not exist yet), a new empty
    dataset with the columns 'user_id', 'query' and 'response' is created
    and pushed to the Hub.

    Returns:
        datasets.Dataset: the "train" split of the memory dataset.
    """
    # Bug fix: HF_TOKEN_MEMORY was loaded at module level but never used;
    # pass it through so private repos work (None -> anonymous access).
    token = HF_TOKEN_MEMORY or None
    try:
        ds = load_dataset(DATASET_REPO, split="train", token=token)
        st.write("Dataset loaded from HF Hub.")
        logger.info("Dataset erfolgreich vom HF Hub geladen.")
    except Exception:
        # Any load failure (missing repo, auth, network) falls back to
        # creating a fresh empty dataset. NOTE(review): this also masks
        # auth/network errors as "dataset not found".
        st.write("Dataset not found on HF Hub. Creating a new one...")
        logger.info("Kein Dataset gefunden. Erstelle ein neues Dataset...")
        data = {"user_id": [], "query": [], "response": []}
        ds = Dataset.from_dict(data)
        ds.push_to_hub(DATASET_REPO, token=token)
        st.write("New dataset created and pushed to HF Hub.")
        logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
    return ds
def add_to_memory(user_id, query, response):
    """
    Append one (query, response) interaction for *user_id* to the memory
    dataset and push the updated dataset back to the HF Hub.

    NOTE: this re-downloads and re-uploads the entire dataset on every
    call — acceptable for a small demo, not for production volumes.
    """
    ds = load_memory_dataset()
    # Wrap the new entry as a single-row dataset so it can be concatenated.
    new_entry = Dataset.from_dict({
        "user_id": [user_id],
        "query": [query],
        "response": [response]
    })
    updated_ds = concatenate_datasets([ds, new_entry])
    # Bug fix: pass the token (was omitted) so pushes to private repos work.
    updated_ds.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY or None)
    st.write("Memory updated.")
    logger.info("Memory-Dataset erfolgreich aktualisiert.")
def get_memory(user_id):
    """
    Return all stored (query, response) entries for the given customer ID.

    Args:
        user_id: customer ID to filter the memory dataset on.

    Returns:
        datasets.Dataset: the filtered dataset (possibly empty).
    """
    ds = load_memory_dataset()
    # Exact-match filter on the user_id column.
    filtered_ds = ds.filter(lambda x: x["user_id"] == user_id)
    logger.info(f"Memory für User {user_id} abgerufen. {len(filtered_ds)} Einträge gefunden.")
    return filtered_ds
# ------------------ OpenAI chat API integration ------------------
def generate_response(prompt, model="gpt-3.5-turbo"):
    """
    Send *prompt* to the OpenAI chat completion API and return the reply.

    Args:
        prompt: full prompt text (memory context + current customer query).
        model: chat model to use; defaults to "gpt-3.5-turbo" for backward
            compatibility (the docstring previously claimed GPT-4 while the
            model was hard-coded — it is now a parameter).

    Returns:
        str: content of the first completion choice.
    """
    # Uses the pre-1.0 openai SDK interface (openai.ChatCompletion).
    response = openai.ChatCompletion.create(
        model=model,
        messages=[
            {"role": "system", "content": "You are a customer support AI for TechGadgets.com."},
            {"role": "user", "content": prompt}
        ]
    )
    logger.info("Antwort von OpenAI erhalten.")
    return response.choices[0].message.content
# ------------------ Streamlit App UI ------------------
st.title("AI Customer Support Agent with Memory 🛒")
st.caption("Chat with a customer support assistant who remembers your past interactions.")

# Ask for the OpenAI API key (password-masked input); set it both in the
# environment and on the SDK module so downstream calls are authenticated.
openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
if openai_api_key:
    os.environ['OPENAI_API_KEY'] = openai_api_key
    openai.api_key = openai_api_key
    logger.info("OpenAI API Key gesetzt.")
# ------------------ Class: CustomerSupportAIAgent ------------------
class CustomerSupportAIAgent:
    """Customer-support agent that augments OpenAI chat completions with a
    per-customer memory stored in a Hugging Face dataset."""

    def __init__(self):
        # The module-level HF dataset helpers act as the memory store.
        self.client = openai  # pre-1.0 openai SDK module used as the client
        self.app_id = "customer-support"

    def handle_query(self, query, user_id=None):
        """
        Answer *query* using past interactions of *user_id* as context,
        then persist the new (query, answer) pair to memory.

        Args:
            query: the customer's current message.
            user_id: customer ID used to look up / store memory. NOTE(review):
                a None user_id is stored as-is in the dataset — callers are
                expected to pass a real ID.

        Returns:
            str: the assistant's answer, or a fallback message on error.
        """
        try:
            # Fetch relevant memories from the HF dataset.
            memories = get_memory(user_id)
            context = "Relevant past information:\n"
            if len(memories) > 0:
                for entry in memories:
                    context += f"- Query: {entry['query']}\n Response: {entry['response']}\n"
                logger.info("Kontext aus Memory-Dataset erstellt.")
            # Combine context with the current request.
            full_prompt = f"{context}\nCustomer: {query}\nSupport Agent:"
            logger.info("Vollständiger Prompt für OpenAI erstellt.")
            answer = generate_response(full_prompt)
            # Persist the interaction in the memory dataset.
            add_to_memory(user_id, query, answer)
            logger.info("Interaktion im Memory-Dataset gespeichert.")
            return answer
        except Exception as e:
            logger.error(f"Fehler bei handle_query: {e}")
            st.error(f"An error occurred while handling the query: {e}")
            return "Sorry, I encountered an error. Please try again later."

    def generate_synthetic_data(self, user_id: str) -> dict | None:
        """
        Generate a synthetic customer profile and order history via GPT-4,
        store each field in the memory dataset, and return it as a dict.

        Returns:
            dict | None: the parsed profile, or None on failure (an error
            is also shown in the Streamlit UI).
        """
        try:
            today = datetime.datetime.now()
            order_date = (today - timedelta(days=10)).strftime("%B %d, %Y")
            expected_delivery = (today + timedelta(days=2)).strftime("%B %d, %Y")
            # Prompt to generate synthetic customer data for a delivery service.
            prompt = f"""Generate a detailed customer profile and order history for a DeliverItExpress customer with ID {user_id}. Include:
1. Customer name and basic info (age, gender, and contact details)
2. A recent order of a gourmet meal (placed on {order_date} and delivered by {expected_delivery})
3. Order details including food items, total price, and order number
4. Customer's delivery address
5. 2-3 previous orders from the past year with different types of cuisines
6. 2-3 customer service interactions regarding delivery issues (e.g., late delivery, missing items)
7. Any preferences or patterns in their ordering behavior (e.g., favorite cuisines, peak ordering times)
Format the output as a JSON object."""
            logger.info("Prompt for generating synthetic delivery service data created.")
            response = self.client.ChatCompletion.create(
                model="gpt-4",
                messages=[
                    {"role": "system", "content": "You are a data generation AI that creates realistic customer profiles and order histories. Always respond with valid JSON."},
                    {"role": "user", "content": prompt}
                ]
            )
            logger.info("Antwort für synthetische Daten von OpenAI erhalten.")
            raw = response.choices[0].message.content.strip()
            # Robustness fix: models frequently wrap JSON in ```json fences,
            # which made json.loads raise; strip any fencing before parsing.
            if raw.startswith("```"):
                raw = raw.strip("`").strip()
                if raw.lower().startswith("json"):
                    raw = raw[4:].lstrip()
            customer_data = json.loads(raw)
            # Also persist the generated data in the memory dataset:
            # lists are stored item-by-item, scalars/objects as one entry.
            for key, value in customer_data.items():
                if isinstance(value, list):
                    for item in value:
                        add_to_memory(user_id, f"{key} item", json.dumps(item))
                else:
                    add_to_memory(user_id, key, json.dumps(value))
            logger.info("Synthetische Daten im Memory-Dataset gespeichert.")
            return customer_data
        except Exception as e:
            logger.error(f"Fehler bei generate_synthetic_data: {e}")
            st.error(f"Failed to generate synthetic data: {e}")
            return None
# ------------------ Initialize the CustomerSupportAIAgent ------------------
support_agent = CustomerSupportAIAgent()

# ------------------ Sidebar: customer ID and options ------------------
st.sidebar.title("Enter your Customer ID:")
previous_customer_id = st.session_state.get("previous_customer_id", None)
customer_id = st.sidebar.text_input("Enter your Customer ID")

if customer_id != previous_customer_id:
    # New customer ID entered: reset chat history and synthetic profile.
    st.session_state.messages = []
    st.session_state.previous_customer_id = customer_id
    st.session_state.customer_data = None
    logger.info("Neue Customer ID erkannt – Chatverlauf und synthetische Daten zurückgesetzt.")

# Button: generate synthetic customer data
if st.sidebar.button("Generate Synthetic Data"):
    if customer_id:
        with st.spinner("Generating customer data..."):
            st.session_state.customer_data = support_agent.generate_synthetic_data(customer_id)
        if st.session_state.customer_data:
            st.sidebar.success("Synthetic data generated successfully!")
            logger.info("Synthetische Daten erfolgreich generiert.")
        else:
            st.sidebar.error("Failed to generate synthetic data.")
            logger.error("Fehler beim Generieren synthetischer Daten.")
    else:
        st.sidebar.error("Please enter a customer ID first.")
        logger.warning("Kein Customer ID eingegeben beim Versuch, synthetische Daten zu generieren.")

# Button: show the generated customer profile
if st.sidebar.button("View Customer Profile"):
    if st.session_state.customer_data:
        st.sidebar.json(st.session_state.customer_data)
    else:
        st.sidebar.info("No customer data generated yet. Click 'Generate Synthetic Data' first.")

# Button: show memory entries for the current customer
if st.sidebar.button("View Memory Info"):
    if customer_id:
        memories = get_memory(customer_id)
        st.sidebar.write(f"Memory for customer **{customer_id}**:")
        for mem in memories:
            st.sidebar.write(f"**Query:** {mem['query']}\n**Response:** {mem['response']}\n---")
    else:
        st.sidebar.error("Please enter a customer ID.")

# ------------------ Initialize and render chat history ------------------
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay previous messages on every rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# ------------------ Main chat: user input ------------------
query = st.chat_input("How can I assist you today?")

if query and customer_id:
    # Record and display the user's message.
    st.session_state.messages.append({"role": "user", "content": query})
    with st.chat_message("user"):
        st.markdown(query)
    logger.info("Benutzeranfrage hinzugefügt.")
    # Generate and display the assistant's answer.
    with st.spinner("Generating response..."):
        answer = support_agent.handle_query(query, user_id=customer_id)
    st.session_state.messages.append({"role": "assistant", "content": answer})
    with st.chat_message("assistant"):
        st.markdown(answer)
    logger.info("Antwort des Assistenten hinzugefügt.")
elif query and not customer_id:
    st.error("Please enter a customer ID to start the chat.")
    logger.warning("Chat gestartet ohne Customer ID.")
elif not openai_api_key:
    # Bug fix: the original bare `else:` showed this API-key warning on
    # every idle rerun (no query typed) even when a key was already set;
    # only warn when the key is actually missing.
    st.warning("Please enter your OpenAI API key to use the customer support agent.")
    logger.info("Warte auf Eingabe des OpenAI API Keys.")