# ============================================================================= # DON'T STEAL THE FREE CODE OF DEVS! Use it for free an do not touch credits! # If you steal this code, in the future you will pay for apps like this! # A bit of respect goes a long way – all rights reserved under German law. # Copyright Volkan Kücükbudak https://github.com/volkansah # Repo URL: https://github.com/AiCodeCraft # =============================================================================
import streamlit as st
import os
import json
import datetime
import openai
from datetime import timedelta
import logging
from datasets import load_dataset, Dataset, concatenate_datasets

# ------------------ Configure logging ------------------
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# ------------------ Initialization ------------------
def main():
    """Run the Streamlit customer-support chat app with persistent memory.

    Loads (or creates) a Hugging Face Hub dataset that stores past
    user/assistant exchanges and injects a user's history as context
    into OpenAI chat completions.
    """
    logger.info("App-Initialisierung gestartet...")

    # ------------------ Hugging Face token ------------------
    # Env var takes precedence; falls back to Streamlit secrets.
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", st.secrets.get("HF_TOKEN_MEMORY", ""))
    if not HF_TOKEN_MEMORY:
        st.warning("Hugging Face Token fehlt (für Dataset-Zugriff)")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI key input — app cannot proceed without it.
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        st.warning("⚠️ API-Key benötigt")
        st.stop()

    openai.api_key = openai_api_key

    # ------------------ Dataset functions ------------------
    DATASET_REPO = "AiCodeCarft/customer_memory"

    @st.cache_resource
    def load_memory_dataset():
        """Load the memory dataset from the HF Hub.

        If it does not exist, create an empty dataset with the full
        schema (user_id, query, response, timestamp) and try to push it.
        On push failure, fall back to the local empty dataset so the app
        keeps working for the current session.
        """
        try:
            # Try to load the dataset with token authentication.
            logger.info(f"Versuche Dataset {DATASET_REPO} zu laden...")
            ds = load_dataset(DATASET_REPO, split="train", token=HF_TOKEN_MEMORY)
            logger.info(f"Dataset erfolgreich geladen mit {len(ds)} Einträgen.")
            return ds
        except Exception as e:
            logger.warning(f"Fehler beim Laden des Datasets: {str(e)}")
            # Create an empty dataset when none exists yet.
            logger.info("Erstelle neues Dataset...")
            data = {"user_id": [], "query": [], "response": [], "timestamp": []}
            ds = Dataset.from_dict(data)
            try:
                # Try to push the freshly created dataset.
                ds.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
                return ds
            except Exception as push_error:
                logger.error(f"Fehler beim Pushen des Datasets: {str(push_error)}")
                st.error("Konnte kein Dataset erstellen. Bitte überprüfe deine Berechtigungen.")
                # Fallback: return the local (unpushed) dataset.
                return ds

    # ------------------ AI agent class ------------------
    class CustomerSupportAIAgent:
        """Chat agent that augments OpenAI completions with per-user memory."""

        def __init__(self):
            self.memory = load_memory_dataset()

        def handle_query(self, query, user_id):
            """Answer `query` using this user's stored history as context,
            then persist the new exchange back to the Hub dataset."""
            # Retrieve this user's previous interactions.
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)

            # Build the conversational context.
            context = "Previous interactions:\n" + "\n".join(
                [f"Q: {h['query']}\nA: {h['response']}" for h in user_history]
            ) if len(user_history) > 0 else "No previous interactions"

            # API request (legacy openai<1.0 ChatCompletion interface,
            # matching the rest of the file).
            response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[
                    {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                    {"role": "user", "content": query}
                ]
            )

            # Extract the assistant's answer.
            answer = response.choices[0].message.content

            # Update memory. BUGFIX: include "timestamp" so the new row
            # matches the schema created in load_memory_dataset();
            # without it concatenate_datasets fails on mismatched columns.
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer],
                "timestamp": [datetime.datetime.now().isoformat()]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)

            return answer

    # ------------------ App logic ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID handling — required to key the memory lookups.
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        st.sidebar.error("Bitte Customer ID eingeben")
        st.stop()

    # Chat history lives in the Streamlit session state.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far.
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle new user input.
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)

        with st.spinner("Denke nach..."):
            response = support_agent.handle_query(prompt, customer_id)

        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").write(response)


# ------------------ Main execution ------------------
if __name__ == "__main__":
    main()