# =============================================================================
# DON'T STEAL THE FREE CODE OF DEVS! Use it for free and do not touch the credits!
# If you steal this code, in the future you will pay for apps like this!
# A bit of respect goes a long way – all rights reserved under German law.
# Copyright Volkan Kücükbudak https://github.com/volkansah
# Repo URL: https://github.com/AiCodeCraft
# =============================================================================
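# NOTE: this script assumes streamlit, datasets, schedule, and a pre-1.0 openai release
# are installed (openai.ChatCompletion was removed in openai>=1.0); exact versions are
# not pinned in this file.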
import streamlit as st
import os
import json
import datetime
import openai
from datetime import timedelta
import logging
from datasets import load_dataset, Dataset, concatenate_datasets
import sys
import schedule
import threading
import time
print("Python Version:", sys.version)
print("Importiere Module...")
# ------------------ Configure logging ------------------
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# ------------------ Initialization ------------------
# Defined as a global variable
HF_TOKEN_MEMORY = ""
def main():
    global HF_TOKEN_MEMORY  # Mark as global so the assignment below updates the module-level token
    logger.info("App initialization started...")

    # ------------------ Hugging Face token ------------------
    HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", "")
    if not HF_TOKEN_MEMORY:
        try:
            HF_TOKEN_MEMORY = st.secrets["HF_TOKEN_MEMORY"]
            logger.info("Token loaded from Streamlit secrets")
        except Exception as e:
            logger.warning(f"Token not found in Streamlit secrets: {str(e)}")

    if HF_TOKEN_MEMORY:
        token_preview = HF_TOKEN_MEMORY[:4] + "..." if len(HF_TOKEN_MEMORY) > 4 else "Invalid"
        logger.info(f"HF token found. Starts with: {token_preview}")
    else:
        logger.warning("No HF token found! Dataset features will not be available.")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI key input
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        logger.warning("API key required")
        st.stop()
    openai.api_key = openai_api_key
    # ------------------ Dataset functions ------------------
    DATASET_REPO = "AiCodeCraft/customer_memory"  # Repository already exists

    @st.cache_resource
    def load_memory_dataset():
        """
        Tries to load the memory dataset from the HF Hub.
        If it does not exist, an empty dataset is created and pushed.
        """
        if not HF_TOKEN_MEMORY:
            logger.warning("No HF token available, using a local dataset")
            return Dataset.from_dict({"user_id": [], "query": [], "response": [], "timestamp": []})
        try:
            logger.info(f"Trying to load dataset {DATASET_REPO}...")
            ds = load_dataset(DATASET_REPO, split="train", token=HF_TOKEN_MEMORY)
            logger.info(f"Dataset loaded successfully with {len(ds)} entries.")
            return ds
        except Exception as e:
            logger.warning(f"Error loading the dataset: {str(e)}")
            logger.info("Creating a new dataset...")
            data = {"user_id": [], "query": [], "response": [], "timestamp": []}
            ds = Dataset.from_dict(data)
            try:
                ds.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                logger.info("New dataset created and pushed successfully.")
            except Exception as push_error:
                logger.error(f"Error pushing the dataset: {str(push_error)}")
            return ds
    # ------------------ AI agent class ------------------
    class CustomerSupportAIAgent:
        def __init__(self):
            self.memory = load_memory_dataset()
            self.schedule_push()

        def schedule_push(self):
            # Periodically push the dataset to the Hub from a background thread
            def push_memory():
                if HF_TOKEN_MEMORY:
                    try:
                        self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                        logger.info("Memory updated successfully (periodic push)")
                    except Exception as e:
                        logger.error(f"Error during periodic dataset update: {str(e)}")
            schedule.every(10).minutes.do(push_memory)
            threading.Thread(target=self.run_schedule, daemon=True).start()

        def run_schedule(self):
            while True:
                schedule.run_pending()
                time.sleep(1)

        def handle_query(self, query, user_id):
            # Retrieve this user's previous interactions from memory
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)
            context = "Previous interactions:\n" + "\n".join(
                [f"Q: {h['query']}\nA: {h['response']}" for h in user_history]
            ) if len(user_history) > 0 else "No previous interactions"

            # API request (openai.ChatCompletion requires the pre-1.0 openai package)
            try:
                response = openai.ChatCompletion.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                        {"role": "user", "content": query}
                    ]
                )
                answer = response.choices[0].message.content
            except Exception as e:
                logger.error(f"Error during OpenAI API request: {str(e)}")
                answer = "Sorry, an error occurred."

            # Update memory with a timestamp
            current_time = datetime.datetime.now().isoformat()
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer],
                "timestamp": [current_time]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            logger.info(f"Memory updated locally for user {user_id}")
            # The Hub push happens periodically in the background
            return answer
    # ------------------ App logic ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID handling
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        logger.warning("No customer ID entered")
        st.stop()

    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Show only the last 20 messages
    max_messages = 20
    displayed_messages = st.session_state.messages[-max_messages:]
    for msg in displayed_messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Process input
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)
        with st.spinner("Thinking..."):
            response = support_agent.handle_query(prompt, customer_id)
        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").write(response)
# ------------------ Main execution ------------------
if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        logger.error(f"Startup error: {str(e)}", exc_info=True)
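# The app is intended to be started with the usual Streamlit launcher, e.g.:
#   streamlit run app.py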