import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv
from PIL import Image
import tempfile
import time
import ssl
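# Dependencies (pip package names, assuming a standard setup): streamlit,
# google-generativeai, python-dotenv, Pillow.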

# Load environment variables
load_dotenv()

# Configure the API key
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
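# GOOGLE_API_KEY is read from the environment; for example, a .env file next to
# this script could contain (placeholder value): GOOGLE_API_KEY=your-api-key-here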

# Safety settings (all Gemini content filters set to BLOCK_NONE, i.e. disabled)
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

def role_to_streamlit(role):
    # Gemini uses the role "model"; Streamlit's chat UI expects "assistant".
    return "assistant" if role == "model" else role

def upload_and_process_file(file_path):
    """Upload a local file to the Gemini File API and wait until it is processed."""
    max_retries = 3
    retry_delay = 2
    for attempt in range(max_retries):
        try:
            if not os.path.exists(file_path):
                raise FileNotFoundError(f"Le fichier {file_path} n'existe pas")
            file_size = os.path.getsize(file_path)
            if file_size == 0:
                raise ValueError(f"Le fichier {file_path} est vide")
            uploaded_file = genai.upload_file(path=file_path)
            # Poll until the file leaves the PROCESSING state (5-minute timeout).
            timeout = 300
            start_time = time.time()
            while uploaded_file.state.name == "PROCESSING":
                if time.time() - start_time > timeout:
                    raise TimeoutError("Timeout pendant le traitement du fichier")
                time.sleep(10)
                uploaded_file = genai.get_file(uploaded_file.name)
            if uploaded_file.state.name == "FAILED":
                raise ValueError(f"Échec du traitement: {uploaded_file.state.name}")
            return uploaded_file
        except Exception:
            # Retry with a linearly increasing delay; re-raise on the last attempt.
            if attempt < max_retries - 1:
                time.sleep(retry_delay * (attempt + 1))
            else:
                raise

def allowed_file(filename):
    ALLOWED_EXTENSIONS = {'txt', 'mp4', 'mp3', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
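# For example, allowed_file("photo.JPG") is True (the extension check is
# case-insensitive), while allowed_file("archive.zip") is False.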

# Initialize the model
model = genai.GenerativeModel(
    'gemini-1.5-flash',
    safety_settings=safety_settings,
    system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam")

# Streamlit page configuration
st.set_page_config(page_title="Mariam - Assistant IA", page_icon="🤖")
st.title("Mariam AI - Chat Intelligent")

# Initialize the chat history
if "chat" not in st.session_state:
    st.session_state.chat = model.start_chat(history=[])
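# st.session_state keeps the chat object (and therefore its history) alive across
# Streamlit reruns, so the conversation survives each user interaction.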

# Custom CSS (to pin elements to the top and bottom of the page)
st.markdown("""
<style>
/* Main container used to pin elements to the top and bottom */
.main-container {
    display: flex;
    flex-direction: column;
    height: 95vh; /* Adjust as needed */
}
/* Container for the chat history */
.chat-history {
    flex-grow: 1;
    overflow-y: auto;
    display: flex;
    flex-direction: column-reverse;
}
/* Container for the input and upload area */
.input-area {
    display: flex;
    align-items: center;
    gap: 10px;
    margin-top: 10px; /* Space between the history and the input area */
}
/* Style for the input box */
.chat-input {
    flex-grow: 1; /* Lets the input box take the available space */
}
/* Style for the upload icon */
.upload-icon {
    font-size: 1.5em;
}
</style>
""", unsafe_allow_html=True)

# Main container
main_container = st.container()

with main_container:
    # Container for the chat history (inverted with column-reverse)
    chat_history_container = st.container()
    with chat_history_container:
        # Display the message history
        for message in st.session_state.chat.history:
            with st.chat_message(role_to_streamlit(message.role)):
                st.markdown(message.parts[0].text)
                if len(message.parts) > 1:
                    for part in message.parts[1:]:
                        if hasattr(part, 'image'):
                            st.image(part.image)

    # Container for the input and upload area (at the bottom)
    input_area = st.container()
    with input_area:
        # Columns for the icon and the uploader
        ucol1, ucol2 = st.columns([1, 5])
        with ucol1:
            # File upload icon
            st.markdown("<span class='upload-icon'>📁</span>", unsafe_allow_html=True)
        with ucol2:
            # File uploader (all supported types)
            uploaded_files = st.file_uploader("", type=["txt", "mp4", "mp3", "pdf", "png", "jpg", "jpeg", "gif"],
                                              accept_multiple_files=True, key="files",
                                              label_visibility="collapsed")

# Input box (placed after the uploader so it sits at the bottom)
prompt = st.chat_input("Que puis-je faire pour vous ?", key="chat_input")
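# st.chat_input returns None until the user submits a message, so the block
# further down only runs once a prompt has actually been entered.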

# Apply styles to the input and uploader elements (simplified)
st.markdown(f"""
<style>
div[data-testid='stChatInput'] {{
    flex-grow: 1;
}}
div[data-testid='stFileUploader'] {{
    display: inline-flex;
    padding-left: 0;
    padding-right: 0;
    padding-bottom: 0;
    padding-top: 0;
    margin-left: 0;
    margin-right: 0;
}}
div[data-testid='stFileUploader'] > div:nth-child(2) {{
    display: none;
}}
</style>
""", unsafe_allow_html=True)

if prompt:
    content = [prompt]
    temp_files = []
    try:
        # Process uploaded files
        if uploaded_files:
            for file in uploaded_files:
                if allowed_file(file.name):
                    file_extension = os.path.splitext(file.name)[1].lower()
                    if file_extension in ['.jpg', '.jpeg', '.png', '.gif']:
                        # Images are passed to the model directly as PIL images
                        image = Image.open(file)
                        content.append(image)
                        st.chat_message("user").image(image)
                    else:
                        # Other files are copied to a temporary file, then sent
                        # through the Gemini File API
                        with tempfile.NamedTemporaryFile(delete=False, suffix=file_extension) as temp_file:
                            temp_file.write(file.getvalue())
                            temp_files.append(temp_file.name)
                        # Upload after the temp file is closed so the data is flushed to disk
                        gemini_file = upload_and_process_file(temp_file.name)
                        content.append(gemini_file)

        # Display the user message
        st.chat_message("user").markdown(prompt)

        # Send the message and display the response
        response = st.session_state.chat.send_message(content)
        with st.chat_message("assistant"):
            st.markdown(response.text)
    except Exception as e:
        st.error(f"Une erreur est survenue : {str(e)}")
    finally:
        # Clean up temporary files
        for temp_file in temp_files:
            try:
                os.unlink(temp_file)
            except Exception as e:
                print(f"Erreur lors de la suppression du fichier temporaire {temp_file}: {e}")