# =============================================================================
# DON'T STEAL THE FREE CODE OF DEVS! Use it for free and do not touch the credits!
# If you steal this code, in the future you will pay for apps like this!
# A bit of respect goes a long way – all rights reserved under German law.
# Copyright Volkan Kücükbudak https://github.com/volkansah
# Repo URL: https://github.com/AiCodeCraft
# =============================================================================
import streamlit as st
import os
import json
import datetime
import openai
from datetime import timedelta
import logging
from datasets import load_dataset, Dataset, concatenate_datasets
# ------------------ Configure logging ------------------
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# ------------------ Initialization ------------------
def main():
    logger.info("App initialization started...")

    # ------------------ Hugging Face token ------------------
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", st.secrets.get("HF_TOKEN_MEMORY", ""))
    if not HF_TOKEN_MEMORY:
        st.warning("Hugging Face token missing (needed for dataset access)")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI API key input
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        st.warning("⚠️ OpenAI API key required")
        st.stop()
    openai.api_key = openai_api_key
    # ------------------ Dataset functions ------------------
    DATASET_REPO = "AiCodeCarft/customer_memory"

    @st.cache_resource
    def load_memory_dataset():
        # Load the persisted memory from the Hub; fall back to an empty
        # dataset if the repo does not exist yet or cannot be read.
        try:
            return load_dataset(DATASET_REPO, split="train", use_auth_token=HF_TOKEN_MEMORY)
        except Exception:
            return Dataset.from_dict({"user_id": [], "query": [], "response": []})
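    # Note: st.cache_resource caches this load across reruns, so entries
    # appended at runtime are only re-read from the Hub after the cache is
    # cleared or the Space restarts.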
    # ------------------ AI agent class ------------------
    class CustomerSupportAIAgent:
        def __init__(self):
            self.memory = load_memory_dataset()

        def handle_query(self, query, user_id):
            # Retrieve this user's previous interactions from memory
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)

            # Build the conversation context from past Q/A pairs
            if len(user_history) > 0:
                context = "Previous interactions:\n" + "\n".join(
                    f"Q: {h['query']}\nA: {h['response']}" for h in user_history
                )
            else:
                context = "No previous interactions"

            # API request via the legacy ChatCompletion endpoint (openai<1.0)
            response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[
                    {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                    {"role": "user", "content": query}
                ]
            )

            # Extract the answer text
            answer = response.choices[0].message.content

            # Append the new interaction to memory and persist it to the Hub
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
            return answer
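    # Note: handle_query pushes the entire memory dataset to the Hub after
    # every answer; this keeps the memory persistent but adds per-request
    # latency, so batching writes may be worth considering for heavier use.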
    # ------------------ App logic ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID handling
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        st.sidebar.error("Please enter a Customer ID")
        st.stop()

    # Chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display previous messages
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle new input
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)
        with st.spinner("Thinking..."):
            response = support_agent.handle_query(prompt, customer_id)
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)
# ------------------ Main execution ------------------
if __name__ == "__main__":
    main()
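# -----------------------------------------------------------------------------
# Usage sketch (assumptions: the file is saved as app.py, which is a hypothetical
# name here, and the legacy openai<1.0 SDK is installed, since the code calls
# openai.ChatCompletion):
#   pip install streamlit "openai<1.0" datasets
#   export HF_TOKEN_MEMORY=hf_...   # token with write access to the dataset repo
#   streamlit run app.py
# -----------------------------------------------------------------------------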