Edurag_beta / app/llm_handling_3.py
import logging
from openai import OpenAI
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings
from app.config import OPENAI_API_KEY
import gradio as gr
import os
import shutil
logging.basicConfig(level=logging.INFO)
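
# Hedged sketch (assumption): list_databases() is called by delete_database() below but is
# neither defined nor imported in this file; in the repo it presumably lives in the
# document-handling module created when app.py was split up. A minimal stand-in that lists
# the folders named faiss_index_<name> in the working directory:
def list_databases():
    prefix = "faiss_index_"
    return [d[len(prefix):] for d in os.listdir(".") if d.startswith(prefix) and os.path.isdir(d)]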

def answer_question(question, db_name, chat_history=None):
    if chat_history is None:
        chat_history = []

    logging.info(f"Inizio elaborazione domanda: {question} per database: {db_name}")

    try:
        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
        db_path = f"faiss_index_{db_name}"

        if not os.path.exists(db_path):
            return [{"role": "user", "content": question},
                    {"role": "assistant", "content": "Database non trovato"}]

        vectorstore = FAISS.load_local(db_path, embeddings, allow_dangerous_deserialization=True)
        relevant_docs = vectorstore.similarity_search(question, k=3)

        # Build the context from the retrieved documents
        context = "\n".join([doc.page_content for doc in relevant_docs])

        client = OpenAI(api_key=OPENAI_API_KEY)
        messages = [
            {"role": "system", "content": f"Usa questo contesto per rispondere: {context}"},
            {"role": "user", "content": question}
        ]

        response = client.chat.completions.create(
            model="gpt-3.5-turbo",  # changed from gpt-4o-mini to a supported model
            messages=messages,
            temperature=0,
            max_tokens=2048
        )

        answer = response.choices[0].message.content
        return [
            {"role": "user", "content": question},
            {"role": "assistant", "content": answer}
        ]
    except Exception as e:
        logging.error(f"Errore durante la generazione della risposta: {e}")
        return [
            {"role": "user", "content": question},
            {"role": "assistant", "content": f"Si è verificato un errore: {str(e)}"}
        ]
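
# Illustrative usage (assumes a FAISS index has already been saved as "faiss_index_default"):
#
#   history = answer_question("Che cos'è EduRAG?", "default")
#   # history is a list of {"role": ..., "content": ...} dicts, i.e. the "messages"
#   # format that recent Gradio Chatbot components accept directly.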

# In document_handling.py, update delete_database so that it also returns the dropdown update
def delete_database(db_name):
    # NOTE: gr.Dropdown.update() is the Gradio 3.x API; newer Gradio versions use
    # gr.update(choices=...) instead.
    db_path = f"faiss_index_{db_name}"
    if not os.path.exists(db_path):
        return f"Il database {db_name} non esiste.", gr.Dropdown.update(choices=list_databases())
    try:
        shutil.rmtree(db_path)
        logging.info(f"Database {db_name} eliminato con successo.")
        return f"Database {db_name} eliminato con successo.", gr.Dropdown.update(choices=list_databases())
    except OSError as e:
        logging.error(f"Impossibile eliminare il database {db_name}: {e}")
        return f"Impossibile eliminare il database {db_name}: {e}", gr.Dropdown.update(choices=list_databases())

# The call to ensure_default_db() was missing; it is added below.
if __name__ == "__main__":
    ensure_default_db()  # add this call
    rag_chatbot.launch(share=True)