Alibrown committed on
Commit
6325f15
·
verified ·
1 Parent(s): 74a0fdb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +84 -101
app.py CHANGED
@@ -20,121 +20,104 @@ logging.basicConfig(
20
  format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
21
  )
22
  logger = logging.getLogger(__name__)
23
- logger.info("Starte App mit HF-Dataset Memory...")
24
 
25
- # ------------------ Hugging Face Token laden ------------------
26
- HF_TOKEN_MEMORY = os.getenv('HF_TOKEN_MEMORY', '').strip()
27
- if HF_TOKEN_MEMORY:
28
- logger.info("Hugging Face Token gefunden.")
29
- else:
30
- logger.warning("Kein Hugging Face Token gefunden. Falls benötigt, bitte setzen!")
31
-
32
- # ------------------ Einstellungen für das Memory-Dataset ------------------
33
- DATASET_REPO = "AiCodeCarft/customer_memory"
34
-
35
- def load_memory_dataset():
36
- try:
37
- ds = load_dataset(DATASET_REPO, split="train")
38
- logger.info("Dataset erfolgreich vom HF Hub geladen.")
39
- except Exception as e:
40
- logger.info("Kein Dataset gefunden. Erstelle ein neues Dataset...")
41
- data = {"user_id": [], "query": [], "response": []}
42
- ds = Dataset.from_dict(data)
43
- ds.push_to_hub(DATASET_REPO)
44
- logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
45
- return ds
46
-
47
- def add_to_memory(user_id, query, response):
48
- ds = load_memory_dataset()
49
- new_entry = Dataset.from_dict({
50
- "user_id": [user_id],
51
- "query": [query],
52
- "response": [response]
53
- })
54
- updated_ds = concatenate_datasets([ds, new_entry])
55
- updated_ds.push_to_hub(DATASET_REPO)
56
- logger.info("Memory-Dataset erfolgreich aktualisiert.")
57
-
58
- def get_memory(user_id):
59
- ds = load_memory_dataset()
60
- filtered_ds = ds.filter(lambda x: x["user_id"] == user_id)
61
- logger.info(f"Memory für User {user_id} abgerufen. {len(filtered_ds)} Einträge gefunden.")
62
- return filtered_ds
63
-
64
- # ------------------ Streamlit App UI ------------------
65
- st.title("AI Customer Support Agent with Memory 🛒")
66
- st.caption("Chat with a customer support assistant who remembers your past interactions.")
67
-
68
- # OpenAI API Key Eingabe oben in der Haupt-UI
69
- openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
70
-
71
- if not openai_api_key:
72
- st.warning("⚠️ Please enter your OpenAI API key to continue.")
73
- st.stop()
74
 
75
- openai.api_key = openai_api_key # Direktes Setzen des API-Keys
76
 
77
- # ------------------ Klasse: CustomerSupportAIAgent ------------------
78
- class CustomerSupportAIAgent:
79
- def __init__(self):
80
- self.client = openai
81
- self.app_id = "customer-support"
82
 
83
- def handle_query(self, query, user_id=None):
 
84
  try:
85
- memories = get_memory(user_id)
86
- context = "Relevant past information:\n"
87
- if len(memories) > 0:
88
- for entry in memories:
89
- context += f"- Query: {entry['query']}\n Response: {entry['response']}\n"
 
 
 
 
 
 
 
90
 
91
- full_prompt = f"{context}\nCustomer: {query}\nSupport Agent:"
 
 
 
 
92
 
93
- # API-Key wird direkt übergeben
94
- answer = self.client.ChatCompletion.create(
95
  model="gpt-3.5-turbo",
96
  messages=[
97
- {"role": "system", "content": "You are a customer support AI for TechGadgets.com."},
98
- {"role": "user", "content": full_prompt}
99
  ]
100
- ).choices[0].message.content
 
 
 
 
 
 
 
 
 
 
 
 
101
 
102
- add_to_memory(user_id, query, answer)
103
  return answer
104
- except Exception as e:
105
- logger.error(f"Fehler bei handle_query: {e}")
106
- return "Sorry, I encountered an error. Please try again later."
107
-
108
- # ------------------ Initialisierung ------------------
109
- support_agent = CustomerSupportAIAgent()
110
 
111
- # ------------------ Sidebar-Komponenten ------------------
112
- with st.sidebar:
113
- st.title("Customer ID")
114
- customer_id = st.text_input("Enter your Customer ID", key="customer_id")
115
-
116
- if 'customer_id' in st.session_state and st.session_state.customer_id:
117
- if st.button("Generate Synthetic Data"):
118
- # ... (deine bestehende Synthetic Data Logik)
119
-
120
- # ------------------ Chat-History Management ------------------
121
- if "messages" not in st.session_state:
122
- st.session_state.messages = []
123
 
124
- # ------------------ Chat-Eingabe ------------------
125
- if prompt := st.chat_input("How can I assist you today?"):
126
  if not customer_id:
127
- st.error(" Please enter a customer ID first")
128
  st.stop()
129
-
130
- st.session_state.messages.append({"role": "user", "content": prompt})
131
-
132
- with st.spinner("Generating response..."):
133
- response = support_agent.handle_query(prompt, customer_id)
134
-
135
- st.session_state.messages.append({"role": "assistant", "content": response})
136
 
137
- # ------------------ Nachrichten anzeigen ------------------
138
- for message in st.session_state.messages:
139
- with st.chat_message(message["role"]):
140
- st.markdown(message["content"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
21
  )
22
  logger = logging.getLogger(__name__)
 
23
 
24
# ------------------ Initialisierung ------------------
def main():
    """Run one Streamlit pass: collect credentials, build the HF-dataset-backed
    memory, and drive the customer-support chat loop.

    Side effects: renders Streamlit widgets, reads the HF token from the
    environment / st.secrets, and pushes the updated memory dataset to the
    Hugging Face Hub after each answered query.
    """
    logger.info("App-Initialisierung gestartet...")

    # ------------------ Hugging Face Token ------------------
    # Environment variable wins; Streamlit secrets are the fallback.
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", st.secrets.get("HF_TOKEN_MEMORY", ""))
    if not HF_TOKEN_MEMORY:
        st.warning("Hugging Face Token fehlt (für Dataset-Zugriff)")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI Key Eingabe
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        st.warning("⚠️ API-Key benötigt")
        st.stop()

    openai.api_key = openai_api_key

    # ------------------ Dataset Funktionen ------------------
    DATASET_REPO = "AiCodeCarft/customer_memory"

    @st.cache_resource
    def load_memory_dataset():
        """Load the memory dataset from the Hub, or fall back to an empty one.

        NOTE(review): st.cache_resource keeps the first loaded dataset for the
        whole server process; pushes made later in the session are not re-read
        here — confirm this staleness is acceptable.
        """
        try:
            # `token=` supersedes the deprecated `use_auth_token=` kwarg;
            # pass None (anonymous) rather than an empty string.
            return load_dataset(DATASET_REPO, split="train", token=HF_TOKEN_MEMORY or None)
        except Exception as exc:  # was a bare `except:` — also caught SystemExit/KeyboardInterrupt
            # First run (repo/split missing) or access problem: start empty
            # with the schema the rest of the app expects.
            logger.warning("Dataset nicht ladbar (%s) - erstelle leeres Memory.", exc)
            return Dataset.from_dict({"user_id": [], "query": [], "response": []})

    # ------------------ AI Agent Klasse ------------------
    class CustomerSupportAIAgent:
        """Answers queries with per-user history pulled from the memory dataset."""

        def __init__(self):
            # In-process working copy of the memory; grows with each handled query.
            self.memory = load_memory_dataset()

        def handle_query(self, query, user_id):
            """Return the model's answer for `query`, grounded in `user_id`'s
            history, and persist the new (query, answer) pair to the Hub.

            Returns a fixed apology string if the OpenAI call fails, so a
            transient API error does not crash the Streamlit rerun.
            """
            # Memory abrufen
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)

            # Kontext erstellen
            context = "Previous interactions:\n" + "\n".join(
                [f"Q: {h['query']}\nA: {h['response']}"
                 for h in user_history]
            ) if len(user_history) > 0 else "No previous interactions"

            try:
                # API-Anfrage
                response = openai.ChatCompletion.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                        {"role": "user", "content": query}
                    ]
                )

                # Antwort verarbeiten
                answer = response.choices[0].message.content
            except Exception as exc:
                # Restores the previous version's behavior: keep the app alive
                # on API failures instead of crashing mid-chat.
                logger.error("Fehler bei handle_query: %s", exc)
                return "Sorry, I encountered an error. Please try again later."

            # Memory aktualisieren
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            # NOTE(review): re-uploads the whole dataset on every turn —
            # cost grows with memory size; consider batching if volume grows.
            self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY or None)

            return answer

    # ------------------ App-Logik ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID Handling
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        st.sidebar.error("Bitte Customer ID eingeben")
        st.stop()

    # Chat-History
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Nachrichten anzeigen
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Eingabe verarbeiten
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)

        with st.spinner("Denke nach..."):
            response = support_agent.handle_query(prompt, customer_id)

        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").write(response)

# ------------------ Hauptausführung ------------------
if __name__ == "__main__":
    main()