Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -20,121 +20,104 @@ logging.basicConfig(
|
|
20 |
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
21 |
)
|
22 |
logger = logging.getLogger(__name__)
|
23 |
-
logger.info("Starte App mit HF-Dataset Memory...")
|
24 |
|
25 |
-
# ------------------
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
ds.push_to_hub(DATASET_REPO)
|
44 |
-
logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
|
45 |
-
return ds
|
46 |
-
|
47 |
-
def add_to_memory(user_id, query, response):
|
48 |
-
ds = load_memory_dataset()
|
49 |
-
new_entry = Dataset.from_dict({
|
50 |
-
"user_id": [user_id],
|
51 |
-
"query": [query],
|
52 |
-
"response": [response]
|
53 |
-
})
|
54 |
-
updated_ds = concatenate_datasets([ds, new_entry])
|
55 |
-
updated_ds.push_to_hub(DATASET_REPO)
|
56 |
-
logger.info("Memory-Dataset erfolgreich aktualisiert.")
|
57 |
-
|
58 |
-
def get_memory(user_id):
|
59 |
-
ds = load_memory_dataset()
|
60 |
-
filtered_ds = ds.filter(lambda x: x["user_id"] == user_id)
|
61 |
-
logger.info(f"Memory für User {user_id} abgerufen. {len(filtered_ds)} Einträge gefunden.")
|
62 |
-
return filtered_ds
|
63 |
-
|
64 |
-
# ------------------ Streamlit App UI ------------------
|
65 |
-
st.title("AI Customer Support Agent with Memory 🛒")
|
66 |
-
st.caption("Chat with a customer support assistant who remembers your past interactions.")
|
67 |
-
|
68 |
-
# OpenAI API Key Eingabe oben in der Haupt-UI
|
69 |
-
openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
|
70 |
-
|
71 |
-
if not openai_api_key:
|
72 |
-
st.warning("⚠️ Please enter your OpenAI API key to continue.")
|
73 |
-
st.stop()
|
74 |
|
75 |
-
openai.api_key = openai_api_key
|
76 |
|
77 |
-
# ------------------
|
78 |
-
|
79 |
-
def __init__(self):
|
80 |
-
self.client = openai
|
81 |
-
self.app_id = "customer-support"
|
82 |
|
83 |
-
|
|
|
84 |
try:
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
90 |
|
91 |
-
|
|
|
|
|
|
|
|
|
92 |
|
93 |
-
# API-
|
94 |
-
|
95 |
model="gpt-3.5-turbo",
|
96 |
messages=[
|
97 |
-
{"role": "system", "content": "You are a
|
98 |
-
{"role": "user", "content":
|
99 |
]
|
100 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
101 |
|
102 |
-
add_to_memory(user_id, query, answer)
|
103 |
return answer
|
104 |
-
except Exception as e:
|
105 |
-
logger.error(f"Fehler bei handle_query: {e}")
|
106 |
-
return "Sorry, I encountered an error. Please try again later."
|
107 |
-
|
108 |
-
# ------------------ Initialisierung ------------------
|
109 |
-
support_agent = CustomerSupportAIAgent()
|
110 |
|
111 |
-
# ------------------
|
112 |
-
|
113 |
-
st.title("Customer ID")
|
114 |
-
customer_id = st.text_input("Enter your Customer ID", key="customer_id")
|
115 |
-
|
116 |
-
if 'customer_id' in st.session_state and st.session_state.customer_id:
|
117 |
-
if st.button("Generate Synthetic Data"):
|
118 |
-
# ... (deine bestehende Synthetic Data Logik)
|
119 |
-
|
120 |
-
# ------------------ Chat-History Management ------------------
|
121 |
-
if "messages" not in st.session_state:
|
122 |
-
st.session_state.messages = []
|
123 |
|
124 |
-
#
|
125 |
-
|
126 |
if not customer_id:
|
127 |
-
st.error("
|
128 |
st.stop()
|
129 |
-
|
130 |
-
st.session_state.messages.append({"role": "user", "content": prompt})
|
131 |
-
|
132 |
-
with st.spinner("Generating response..."):
|
133 |
-
response = support_agent.handle_query(prompt, customer_id)
|
134 |
-
|
135 |
-
st.session_state.messages.append({"role": "assistant", "content": response})
|
136 |
|
137 |
-
#
|
138 |
-
|
139 |
-
|
140 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
21 |
)
|
22 |
logger = logging.getLogger(__name__)
|
|
|
# ------------------ Initialization ------------------
def main():
    """Streamlit entry point: collect credentials, build the UI, run the chat loop.

    Side effects: renders Streamlit widgets, calls the OpenAI chat API, and
    pushes the updated memory dataset back to the Hugging Face Hub.
    """
    logger.info("App-Initialisierung gestartet...")

    # ------------------ Hugging Face token ------------------
    # Environment variable wins; falls back to Streamlit secrets, then "".
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", st.secrets.get("HF_TOKEN_MEMORY", ""))
    if not HF_TOKEN_MEMORY:
        st.warning("Hugging Face Token fehlt (für Dataset-Zugriff)")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI key input — the app cannot proceed without it.
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        st.warning("⚠️ API-Key benötigt")
        st.stop()

    openai.api_key = openai_api_key

    # ------------------ Dataset helpers ------------------
    DATASET_REPO = "AiCodeCarft/customer_memory"

    @st.cache_resource
    def load_memory_dataset():
        """Load the shared memory dataset from the Hub, or return an empty one.

        Cached via st.cache_resource so the Hub is hit once per session.
        """
        try:
            # NOTE(review): `use_auth_token` is deprecated in newer `datasets`
            # releases in favour of `token=` — confirm against the pinned version.
            return load_dataset(DATASET_REPO, split="train", use_auth_token=HF_TOKEN_MEMORY)
        except Exception as exc:
            # Was a bare `except:` — narrowed and logged so real failures
            # (auth, network, schema) are at least visible in the logs.
            logger.warning("Memory-Dataset konnte nicht geladen werden: %s", exc)
            return Dataset.from_dict({"user_id": [], "query": [], "response": []})

    # ------------------ AI agent ------------------
    class CustomerSupportAIAgent:
        """Chat agent that augments each query with the user's stored history."""

        def __init__(self):
            # In-process copy of the shared memory dataset.
            self.memory = load_memory_dataset()

        def handle_query(self, query, user_id):
            """Answer `query` for `user_id`, then persist the exchange to the Hub."""
            # Retrieve this user's prior interactions from memory.
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)

            # Build the context block injected into the system prompt.
            if len(user_history) > 0:
                context = "Previous interactions:\n" + "\n".join(
                    [f"Q: {h['query']}\nA: {h['response']}" for h in user_history]
                )
            else:
                context = "No previous interactions"

            # API request.
            # NOTE(review): `openai.ChatCompletion` is the pre-1.0 openai SDK
            # interface — confirm the pinned openai version supports it.
            response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[
                    {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                    {"role": "user", "content": query}
                ]
            )

            answer = response.choices[0].message.content

            # Append the new exchange and push the whole dataset back to the Hub.
            # NOTE(review): pushes the full dataset on every query — acceptable
            # for small datasets, but consider batching if it grows.
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)

            return answer

    # ------------------ App logic ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID handling (sidebar); required before chatting.
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        st.sidebar.error("Bitte Customer ID eingeben")
        st.stop()

    # Chat history lives in session state so it survives Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far.
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle new user input.
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)

        with st.spinner("Denke nach..."):
            response = support_agent.handle_query(prompt, customer_id)

        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").write(response)

# ------------------ Main ------------------
if __name__ == "__main__":
    main()