from flask import Flask, render_template, request, jsonify, session
import google.generativeai as genai
import os
from dotenv import load_dotenv
import http.client
import json
from werkzeug.utils import secure_filename
import uuid
app = Flask(__name__)
app.secret_key = os.urandom(24)
load_dotenv()
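# Example .env entries this app reads (GOOGLE_API_KEY comes from the call below;
# SERPER_API_KEY is an assumed variable name for the web-search key):
#   GOOGLE_API_KEY=your-google-ai-key
#   SERPER_API_KEY=your-serper-dev-key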
# Configure Google AI
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
# Safety settings
safety_settings = [
{"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]
# System prompt
SYSTEM_PROMPT = """
# Prompt système pour Mariam, IA conçue par youssouf
[Your existing system prompt content here]
"""
# Initialize Gemini model
model = genai.GenerativeModel(
'gemini-2.0-flash',
safety_settings=safety_settings,
system_instruction=SYSTEM_PROMPT
)
# In-memory storage for per-session chat objects
chat_sessions = {}
def perform_web_search(query):
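    """Query the Serper.dev Google Search API and return the parsed JSON response,
    or None if the request fails."""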
conn = http.client.HTTPSConnection("google.serper.dev")
payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': os.getenv("SERPER_API_KEY", ""),  # Serper.dev key read from the environment (assumed variable name)
        'Content-Type': 'application/json'
    }
try:
conn.request("POST", "/search", payload, headers)
res = conn.getresponse()
data = json.loads(res.read().decode("utf-8"))
return data
except Exception as e:
print(f"Web search error: {e}")
return None
finally:
conn.close()
def format_search_results(data):
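    """Reduce a Serper.dev response to a small list of dicts (knowledge graph entry
    plus up to three organic results); returns a short French notice if data is empty."""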
if not data:
return "Aucun résultat trouvé"
result = []
if 'knowledgeGraph' in data:
kg = data['knowledgeGraph']
result.append({
'type': 'knowledge',
'title': kg.get('title', ''),
'description': kg.get('description', ''),
'category': kg.get('type', '')
})
if 'organic' in data:
for item in data['organic'][:3]:
result.append({
'type': 'organic',
'title': item['title'],
'snippet': item['snippet'],
'link': item['link']
})
return result
UPLOAD_FOLDER = 'temp'
ALLOWED_EXTENSIONS = {'jpg', 'jpeg', 'png', 'pdf', 'txt', 'mp3', 'mp4'}
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
def allowed_file(filename):
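    """Return True if the filename has an extension listed in ALLOWED_EXTENSIONS."""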
return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route('/')
def home():
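    """Render the chat page, creating a fresh session (id, message history, upload list) on first visit."""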
if 'session_id' not in session:
session['session_id'] = str(uuid.uuid4())
session['messages'] = []
        session['uploaded_files'] = []  # initialize the list of uploaded files
return render_template('index.html', messages=session.get('messages', []))
@app.route('/send_message', methods=['POST'])
def send_message():
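    """Handle a chat turn: build a prompt from the user message, any uploaded file
    references and optional web search results, send it to Gemini, and return the reply."""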
try:
data = request.json
user_message = data.get('message')
web_search_enabled = data.get('web_search', False)
if not user_message:
return jsonify({'error': 'No message provided'}), 400
        # Initialize the Flask session if necessary
if 'session_id' not in session:
session['session_id'] = str(uuid.uuid4())
session['messages'] = []
session['uploaded_files'] = []
session_id = session['session_id']
        # Initialize the chat session for this user if necessary
if session_id not in chat_sessions:
chat_sessions[session_id] = model.start_chat(history=[])
        # Include references to uploaded files in the prompt
prompt = ""
if 'uploaded_files' in session and session['uploaded_files']:
prompt += "Fichiers uploadés:\n"
for file_info in session['uploaded_files']:
prompt += f"- Nom: {file_info['filename']}, Référence: {file_info['file_id']}\n"
prompt += "\n"
prompt += f"Question: {user_message}"
        # Perform a web search if enabled
if web_search_enabled:
web_results = perform_web_search(user_message)
if web_results:
formatted_results = format_search_results(web_results)
prompt += f"\n\nRésultats de recherche web:\n{formatted_results}\n\nPourrais-tu analyser ces informations et me donner une réponse complète?"
        # Send the message to Gemini
response = chat_sessions[session_id].send_message(prompt)
        # Update the chat history stored in the session
if 'messages' not in session:
session['messages'] = []
current_messages = session['messages']
current_messages.append({
'role': 'user',
'content': prompt
})
current_messages.append({
'role': 'assistant',
'content': response.text
})
session['messages'] = current_messages
        # Optional: clear the list of uploaded files after use
session['uploaded_files'] = []
return jsonify({
'response': response.text
})
except Exception as e:
print(f"Error in send_message: {e}")
return jsonify({'error': str(e)}), 500
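# A minimal request sketch for the endpoint above (payload keys taken from the handler;
# the example values are illustrative only):
#   POST /send_message
#   Content-Type: application/json
#   {"message": "Bonjour Mariam", "web_search": true}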
@app.route('/upload', methods=['POST'])
def upload_file():
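    """Accept a single file upload, validate its extension, save it to the temp folder,
    push it to Gemini via genai.upload_file, and record its reference in the session."""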
if 'file' not in request.files:
return jsonify({'error': 'No file part'}), 400
file = request.files['file']
if file.filename == '':
return jsonify({'error': 'No selected file'}), 400
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
filepath = os.path.join(UPLOAD_FOLDER, filename)
file.save(filepath)
try:
gemini_file = genai.upload_file(filepath)
            # Store the file reference in the session
if 'uploaded_files' not in session:
session['uploaded_files'] = []
session['uploaded_files'].append({
'filename': filename,
                'file_id': gemini_file.name  # resource name of the uploaded File object (JSON-serializable, unlike the object itself)
})
return jsonify({'success': True, 'filename': filename})
except Exception as e:
return jsonify({'error': str(e)}), 500
return jsonify({'error': 'Invalid file type'}), 400
@app.route('/clear_chat', methods=['POST'])
def clear_chat():
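    """Drop the server-side Gemini chat for this session and reset the stored message and upload lists."""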
session_id = session.get('session_id')
if session_id in chat_sessions:
del chat_sessions[session_id]
session['messages'] = []
session['uploaded_files'] = []
return jsonify({'success': True})
if __name__ == '__main__':
app.run()