import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv
import http.client
import json

# Load API keys from a local .env file.
load_dotenv()

# Create the temp directory for uploads before any file is written to it.
os.makedirs("temp", exist_ok=True)

genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
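# Expected .env contents. GOOGLE_API_KEY is read by genai.configure above;
# SERPER_API_KEY is an assumed variable name used by perform_web_search below:
#
#   GOOGLE_API_KEY=your-google-ai-studio-key
#   SERPER_API_KEY=your-serper-dev-key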
# Disable all of Gemini's content filters for this app.
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]
# Gemini model with the built-in code-execution tool enabled and a custom persona.
model = genai.GenerativeModel(
    'gemini-2.0-flash-exp',
    tools='code_execution',
    safety_settings=safety_settings,
    system_instruction="Tu es un assistant intelligent. Ton but est d'assister du mieux que tu peux. Tu as été créé par Aenir et tu t'appelles Mariam."
)
def perform_web_search(query):
    """Query the Serper.dev search API and return the parsed JSON response, or None on error."""
    conn = http.client.HTTPSConnection("google.serper.dev")
    payload = json.dumps({"q": query})
    headers = {
        # Read the key from the environment instead of hardcoding it;
        # SERPER_API_KEY is an assumed variable name.
        'X-API-KEY': os.getenv("SERPER_API_KEY"),
        'Content-Type': 'application/json'
    }
    try:
        conn.request("POST", "/search", payload, headers)
        res = conn.getresponse()
        data = json.loads(res.read().decode("utf-8"))
        return data
    except Exception as e:
        st.error(f"Erreur lors de la recherche web : {e}")
        return None
    finally:
        conn.close()
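# Illustrative shape of a Serper response, limited to the fields that
# format_search_results below consumes (the values are hypothetical):
#
#   {
#       "knowledgeGraph": {"title": "...", "type": "...", "description": "..."},
#       "organic": [{"title": "...", "snippet": "...", "link": "..."}],
#       "peopleAlsoAsk": [{"question": "...", "snippet": "..."}]
#   }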
def format_search_results(data):
    """Render the Serper JSON response as a markdown summary."""
    if not data:
        return "Aucun résultat trouvé"

    result = ""

    # Knowledge graph panel, when present.
    if 'knowledgeGraph' in data:
        kg = data['knowledgeGraph']
        result += f"### {kg.get('title', '')}\n"
        result += f"*{kg.get('type', '')}*\n\n"
        result += f"{kg.get('description', '')}\n\n"

    # Top three organic results; .get() guards against missing fields.
    if 'organic' in data:
        result += "### Résultats principaux:\n"
        for item in data['organic'][:3]:
            result += f"- **{item.get('title', '')}**\n"
            result += f"  {item.get('snippet', '')}\n"
            result += f"  [Lien]({item.get('link', '')})\n\n"

    # Up to two "people also ask" entries.
    if 'peopleAlsoAsk' in data:
        result += "### Questions fréquentes:\n"
        for item in data['peopleAlsoAsk'][:2]:
            result += f"- **{item.get('question', '')}**\n"
            result += f"  {item.get('snippet', '')}\n\n"

    return result
def role_to_streamlit(role):
    """Map Gemini's 'model' role to Streamlit's 'assistant' role; pass others through."""
    if role == "model":
        return "assistant"
    return role
if "chat" not in st.session_state: |
|
st.session_state.chat = model.start_chat(history=[]) |
|
if "web_search" not in st.session_state: |
|
st.session_state.web_search = False |
|
|
|
|
|
st.title("Mariam AI!") |
|
|
|
|
|
with st.sidebar:
    st.title("Paramètres")
    st.session_state.web_search = st.toggle("Activer la recherche web", value=st.session_state.web_search)

uploaded_file = st.file_uploader("Télécharger un fichier (image/document)",
                                 type=['jpg', 'mp4', 'mp3', 'jpeg', 'png', 'pdf', 'txt'])
# Replay the conversation history on each rerun.
for message in st.session_state.chat.history:
    with st.chat_message(role_to_streamlit(message.role)):
        st.markdown(message.parts[0].text)
def process_uploaded_file(file):
    """Save the uploaded file under temp/ and push it to the Gemini Files API."""
    if file is not None:
        with open(os.path.join("temp", file.name), "wb") as f:
            f.write(file.getbuffer())
        try:
            gemini_file = genai.upload_file(os.path.join("temp", file.name))
            return gemini_file
        except Exception as e:
            st.error(f"Erreur lors du téléchargement du fichier : {e}")
            return None
if prompt := st.chat_input("Hey?"): |
|
uploaded_gemini_file = None |
|
if uploaded_file: |
|
uploaded_gemini_file = process_uploaded_file(uploaded_file) |
|
|
|
|
|
st.chat_message("user").markdown(prompt) |
|
|
|
try: |
|
|
|
web_results = None |
|
if st.session_state.web_search: |
|
with st.spinner("Recherche web en cours..."): |
|
web_results = perform_web_search(prompt) |
|
if web_results: |
|
formatted_results = format_search_results(web_results) |
|
prompt = f"""Question: {prompt}\n\nRésultats de recherche web:\n{formatted_results}\n\nPourrais-tu analyser ces informations et me donner une réponse complète?""" |
|
|
|
|
|
if uploaded_gemini_file: |
|
response = st.session_state.chat.send_message([uploaded_gemini_file, "\n\n", prompt]) |
|
else: |
|
response = st.session_state.chat.send_message(prompt) |
|
|
|
|
|
with st.chat_message("assistant"): |
|
st.markdown(response.text) |
|
|
|
except Exception as e: |
|
st.error(f"Erreur lors de l'envoi du message : {e}") |
|
|
|
|
|
os.makedirs("temp", exist_ok=True) |