Update app.py
app.py (CHANGED)
@@ -27,21 +27,21 @@ def role_to_streamlit(role):
 def upload_and_process_file(file_path):
     max_retries = 3
     retry_delay = 2
-
+
     for attempt in range(max_retries):
         try:
             if not os.path.exists(file_path):
                 raise FileNotFoundError(f"Le fichier {file_path} n'existe pas")
-
+
             file_size = os.path.getsize(file_path)
             if file_size == 0:
                 raise ValueError(f"Le fichier {file_path} est vide")
-
+
             uploaded_file = genai.upload_file(path=file_path)
-
+
             timeout = 300
             start_time = time.time()
-
+
             while uploaded_file.state.name == "PROCESSING":
                 if time.time() - start_time > timeout:
                     raise TimeoutError("Timeout pendant le traitement du fichier")
@@ -50,7 +50,7 @@ def upload_and_process_file(file_path):
 
             if uploaded_file.state.name == "FAILED":
                 raise ValueError(f"Échec du traitement: {uploaded_file.state.name}")
-
+
             return uploaded_file
 
         except Exception as e:
@@ -64,7 +64,7 @@ def allowed_file(filename):
     return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
 
 # Initialiser le modèle
-model = genai.GenerativeModel('gemini-1.5-flash',
+model = genai.GenerativeModel('gemini-1.5-flash',
                               safety_settings=safety_settings,
                               system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam")
 
@@ -72,90 +72,126 @@ model = genai.GenerativeModel('gemini-1.5-flash',
 st.set_page_config(page_title="Mariam - Assistant IA", page_icon="🤖")
 st.title("Mariam AI - Chat Intelligent")
 
-#
+# Initialiser l'historique de chat
+if "chat" not in st.session_state:
+    st.session_state.chat = model.start_chat(history=[])
+
+# CSS personnalisé (amélioré)
 st.markdown("""
 <style>
-
+/* Conteneur principal pour la zone de saisie et les uploads */
+.input-area {
+    display: flex;
+    align-items: center;
+    gap: 10px;
     margin-bottom: 10px;
 }
+
+/* Style pour la zone de saisie */
+.chat-input {
+    flex-grow: 1; /* Permet à la zone de saisie de prendre l'espace disponible */
+}
+
+/* Style pour les conteneurs d'upload */
 .upload-container {
     display: flex;
     align-items: center;
-    gap:
+    gap: 5px;
 }
-
-
-
-
-    right: 0;
-    z-index: 1000;
-    background-color: white;
-    padding: 10px;
-    box-shadow: 0 -2px 5px rgba(0,0,0,0.1);
-}
-.main-content {
-    margin-bottom: 80px; /* Pour laisser de l'espace pour l'input fixe */
+
+/* Style pour les icônes (ajustez la taille si nécessaire) */
+.upload-icon {
+    font-size: 1.5em;
 }
 </style>
 """, unsafe_allow_html=True)
 
-# Initialiser l'historique de chat
-if "chat" not in st.session_state:
-    st.session_state.chat = model.start_chat(history=[])
-
-# Conteneur principal avec marge en bas
-main_container = st.container()
-main_container.markdown('<div class="main-content">', unsafe_allow_html=True)
-
-# Zone d'upload en haut
-upload_container = st.container()
-with upload_container:
-    uploaded_files = st.file_uploader("📁",
-                                      type=["txt","mp4","mp3","pdf", "jpg", "jpeg", "png", "gif"],
-                                      accept_multiple_files=True)
-
 # Afficher l'historique des messages
 for message in st.session_state.chat.history:
-    with st.chat_message(role_to_streamlit(message.role)):
+    with st.chat_message(role_to_streamlit(message.role)):
         st.markdown(message.parts[0].text)
         if len(message.parts) > 1:
             for part in message.parts[1:]:
                 if hasattr(part, 'image'):
                     st.image(part.image)
 
-#
-
+# Créer le conteneur principal pour la zone de saisie et les uploads
+input_area = st.container()
+
+with input_area:
+    # Zone de saisie
+    prompt = st.chat_input("Que puis-je faire pour vous ?", key="chat_input")
+
+    # Colonnes pour les icônes d'upload (utiliser plus de colonnes pour un espacement plus fin)
+    ucol1, ucol2, ucol3, ucol4 = st.columns([1,1,1,1])
+
+    with ucol1:
+        # Icône d'upload de fichiers
+        st.markdown("<span class='upload-icon'>📁</span>", unsafe_allow_html=True)
 
-
-
-
-
+    with ucol2:
+        # Upload de fichiers
+        uploaded_files = st.file_uploader("", type=["txt", "mp4", "mp3", "pdf"],
+                                          accept_multiple_files=True, key="files",
+                                          label_visibility="collapsed")
+
+    with ucol3:
+        # Icône d'upload d'images
+        st.markdown("<span class='upload-icon'>📸</span>", unsafe_allow_html=True)
+
+    with ucol4:
+        # Upload d'images
+        uploaded_images = st.file_uploader("", type=["jpg", "jpeg", "png", "gif"],
+                                           accept_multiple_files=True, key="images",
+                                           label_visibility="collapsed")
+
+# Appliquer les styles aux éléments
+st.markdown(f"""
+<style>
+div[data-testid='stChatInput'] {{
+    flex-grow: 1;
+}}
+div[data-testid='stFileUploader'] {{
+    display: inline-flex;
+    padding-left: 0;
+    padding-right: 0;
+    padding-bottom: 0;
+    padding-top: 0;
+    margin-left: 0;
+    margin-right: 0;
+}}
+div[data-testid='stFileUploader'] > div:nth-child(2) {{
+    display: none;
+}}
+</style>
+""", unsafe_allow_html=True)
 
 if prompt:
     content = [prompt]
     temp_files = []
-
+
     try:
-        # Traitement des
+        # Traitement des images
+        if uploaded_images:
+            for img_file in uploaded_images:
+                if allowed_file(img_file.name):
+                    image = Image.open(img_file)
+                    content.append(image)
+                    st.chat_message("user").image(image)
+
+        # Traitement des autres fichiers
         if uploaded_files:
             for file in uploaded_files:
                 if allowed_file(file.name):
-
-
-
-
-
-                    else:
-                        # Pour les autres types de fichiers
-                        with tempfile.NamedTemporaryFile(delete=False, suffix=os.path.splitext(file.name)[1]) as temp_file:
-                            temp_file.write(file.getvalue())
-                            temp_files.append(temp_file.name)
-                            uploaded_file = upload_and_process_file(temp_file.name)
-                            content.append(uploaded_file)
+                    with tempfile.NamedTemporaryFile(delete=False, suffix=os.path.splitext(file.name)[1]) as temp_file:
+                        temp_file.write(file.getvalue())
+                        temp_files.append(temp_file.name)
+                        uploaded_file = upload_and_process_file(temp_file.name)
+                        content.append(uploaded_file)
 
         # Afficher le message utilisateur
         st.chat_message("user").markdown(prompt)
-
+
         # Envoyer le message et afficher la réponse
         response = st.session_state.chat.send_message(content)
         with st.chat_message("assistant"):
@@ -163,7 +199,7 @@ if prompt:
 
     except Exception as e:
         st.error(f"Une erreur est survenue : {str(e)}")
-
+
     finally:
         # Nettoyage des fichiers temporaires
         for temp_file in temp_files:
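The first hunk header places these changes just below role_to_streamlit, and the rewritten history loop now passes message.role through it before calling st.chat_message. The helper's body is not part of this diff; a minimal sketch of what it presumably does, assuming the usual Gemini-to-Streamlit role mapping, is:

# Hypothetical sketch, inferred from its usage in the diff; not code from this commit.
def role_to_streamlit(role):
    # Gemini chat history uses the roles "user" and "model", while
    # st.chat_message() expects "user" and "assistant".
    return "assistant" if role == "model" else role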
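upload_and_process_file polls while the uploaded file is in the PROCESSING state, but the loop body that waits and refreshes that state (the file's lines 48-49) falls between the first two hunks and is not shown. A hedged sketch of the same pattern with the google-generativeai client, using a hypothetical wait_for_processing helper and poll_interval parameter rather than the file's actual code:

import time
import google.generativeai as genai

def wait_for_processing(uploaded_file, timeout=300, poll_interval=2):
    # Illustrative only: poll the File API until processing finishes or times out.
    start_time = time.time()
    while uploaded_file.state.name == "PROCESSING":
        if time.time() - start_time > timeout:
            raise TimeoutError("Timeout pendant le traitement du fichier")
        time.sleep(poll_interval)                            # pause between polls
        uploaded_file = genai.get_file(uploaded_file.name)   # refresh processing state
    return uploaded_file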