Spaces:
Runtime error
Runtime error
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# =============================================================================
|
2 |
+
# DON'T STEAL THE FREE CODE OF DEVS! Use it for free and do not touch credits!
|
3 |
+
# If you steal this code, in the future you will pay for apps like this!
|
4 |
+
# A bit of respect goes a long way – all rights reserved under German law.
|
5 |
+
# Copyright Volkan Kücükbudak https://github.com/volkansah
|
6 |
+
# Repo URL: https://github.com/AiCodeCraft
|
7 |
+
# =============================================================================
|
8 |
+
import streamlit as st
|
9 |
+
import os
|
10 |
+
import json
|
11 |
+
import datetime
|
12 |
+
import openai
|
13 |
+
from datetime import timedelta
|
14 |
+
import logging
|
15 |
+
from datasets import load_dataset, Dataset, concatenate_datasets
|
16 |
+
import sys
|
17 |
+
import schedule
|
18 |
+
import threading
|
19 |
+
import time
|
20 |
+
|
21 |
+
print("Python Version:", sys.version)
|
22 |
+
print("Importiere Module...")
|
23 |
+
|
24 |
+
# ------------------ Logging konfigurieren ------------------
|
25 |
+
logging.basicConfig(
|
26 |
+
level=logging.INFO,
|
27 |
+
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
28 |
+
)
|
29 |
+
logger = logging.getLogger(__name__)
|
30 |
+
|
31 |
+
# ------------------ Initialisierung ------------------
|
32 |
+
# Als globale Variable definieren
|
33 |
+
HF_TOKEN_MEMORY = ""
|
34 |
+
|
35 |
+
def main():
    """Run the Streamlit customer-support chat app.

    Resolves the Hugging Face token, builds the UI, wires up the OpenAI
    key, and drives a chat loop whose per-user memory is persisted as a
    dataset on the Hugging Face Hub.
    """
    global HF_TOKEN_MEMORY  # module-level token, shared with the nested helpers
    logger.info("App-Initialisierung gestartet...")

    # ------------------ Hugging Face token ------------------
    # Prefer the environment variable; fall back to Streamlit secrets.
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", "")
    if not HF_TOKEN_MEMORY:
        try:
            HF_TOKEN_MEMORY = st.secrets["HF_TOKEN_MEMORY"]
            logger.info("Token aus Streamlit Secrets geladen")
        except Exception as e:
            logger.warning(f"Token nicht in Streamlit Secrets gefunden: {str(e)}")
    if HF_TOKEN_MEMORY:
        # Log only a short prefix of the token, never the full secret.
        token_preview = HF_TOKEN_MEMORY[:4] + "..." if len(HF_TOKEN_MEMORY) > 4 else "Ungültig"
        logger.info(f"HF Token gefunden. Startet mit: {token_preview}")
    else:
        logger.warning("Kein HF Token gefunden! Dataset-Funktionen werden nicht verfügbar sein.")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # Without an OpenAI key the agent cannot answer, so halt the rerun early.
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        logger.warning("API-Key benötigt")
        st.stop()

    openai.api_key = openai_api_key

    # ------------------ Dataset helpers ------------------
    DATASET_REPO = "AiCodeCraft/customer_memory"  # repository already exists

    @st.cache_resource
    def load_memory_dataset():
        """Load the memory dataset from the HF Hub.

        Without a token, returns a purely local empty dataset. If the repo
        cannot be loaded, creates an empty dataset and best-effort pushes it.
        """
        if not HF_TOKEN_MEMORY:
            logger.warning("Kein HF Token vorhanden, verwende lokales Dataset")
            return Dataset.from_dict({"user_id": [], "query": [], "response": [], "timestamp": []})

        try:
            logger.info(f"Versuche Dataset {DATASET_REPO} zu laden...")
            ds = load_dataset(DATASET_REPO, split="train", token=HF_TOKEN_MEMORY)
            logger.info(f"Dataset erfolgreich geladen mit {len(ds)} Einträgen.")
            return ds
        except Exception as e:
            logger.warning(f"Fehler beim Laden des Datasets: {str(e)}")
            logger.info("Erstelle neues Dataset...")
            data = {"user_id": [], "query": [], "response": [], "timestamp": []}
            ds = Dataset.from_dict(data)
            try:
                ds.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
            except Exception as push_error:
                # Best effort only: a failed push still leaves a usable local dataset.
                logger.error(f"Fehler beim Pushen des Datasets: {str(push_error)}")
            return ds

    # ------------------ AI agent class ------------------
    class CustomerSupportAIAgent:
        """Support agent that answers via OpenAI and keeps a per-user memory."""

        def __init__(self):
            self.memory = load_memory_dataset()
            self.schedule_push()

        def schedule_push(self):
            # Periodically push the in-memory dataset to the Hub in the background.
            def push_memory():
                if HF_TOKEN_MEMORY:
                    try:
                        self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                        logger.info("Memory erfolgreich aktualisiert (periodischer Push)")
                    except Exception as e:
                        logger.error(f"Fehler beim periodischen Aktualisieren des Datasets: {str(e)}")

            schedule.every(10).minutes.do(push_memory)
            threading.Thread(target=self.run_schedule, daemon=True).start()

        def run_schedule(self):
            # Background loop that drives the `schedule` jobs.
            while True:
                schedule.run_pending()
                time.sleep(1)

        def handle_query(self, query, user_id):
            """Answer `query` for `user_id`, using and updating the memory."""
            # Build the conversational context from this user's history.
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)
            context = "Previous interactions:\n" + "\n".join(
                [f"Q: {h['query']}\nA: {h['response']}" for h in user_history]
            ) if len(user_history) > 0 else "No previous interactions"

            # Call the OpenAI chat API (legacy openai<1.0 interface —
            # NOTE(review): this raises on openai>=1.0; pin the dependency).
            try:
                response = openai.ChatCompletion.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                        {"role": "user", "content": query}
                    ]
                )
                answer = response.choices[0].message.content
            except Exception as e:
                logger.error(f"Fehler bei OpenAI-API-Anfrage: {str(e)}")
                answer = "Entschuldigung, ein Fehler ist aufgetreten."

            # Append the new interaction (with timestamp) to the local memory;
            # the Hub push happens periodically in the background job.
            current_time = datetime.datetime.now().isoformat()
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer],
                "timestamp": [current_time]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            logger.info(f"Memory lokal aktualisiert für User {user_id}")
            return answer

    # ------------------ App logic ------------------
    # BUG FIX: the original constructed a fresh agent on every Streamlit
    # rerun, which spawned a new daemon scheduler thread and registered an
    # extra `schedule` job each time (thread/job leak, duplicated pushes).
    # Cache one agent per browser session in st.session_state instead.
    if "support_agent" not in st.session_state:
        st.session_state.support_agent = CustomerSupportAIAgent()
    support_agent = st.session_state.support_agent

    # Customer ID handling: memory is keyed by this ID, so require it.
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        logger.warning("Keine Customer ID eingegeben")
        st.stop()

    # Initialise the chat history once per session.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Render only the most recent 20 messages.
    max_messages = 20
    displayed_messages = st.session_state.messages[-max_messages:]
    for msg in displayed_messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Process new input.
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)
        with st.spinner("Denke nach..."):
            response = support_agent.handle_query(prompt, customer_id)
        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").write(response)
|
180 |
+
|
181 |
+
# ------------------ Hauptausführung ------------------
|
182 |
+
# Entry point: run the app and surface any startup failure in the logs.
if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # Top-level boundary catch-all: log with the full traceback so a
        # crashing Space startup is diagnosable from the container logs.
        logger.error(f"Startup-Fehler: {str(e)}", exc_info=True)
|