"""GnosticDev AI — Streamlit app.

Three pages, selected from a sidebar menu:
  * System Prompt      — edit and persist the instructions sent to the model.
  * Chatbot            — Gemini-backed chat with history persisted in
                         ``st.session_state`` (the "cookie" keys).
  * Image Captioning   — upload an image for caption generation.
"""

import os
import json

import streamlit as st
from streamlit_option_menu import option_menu
from PIL import Image

from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)

# Page configuration
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    layout="centered",
    initial_sidebar_state="expanded",
)


def save_chat_history(history):
    """Serialize the chat history and store it in session state.

    Each message is reduced to its role and the text of its first part,
    then the whole list is JSON-encoded under ``cookie_chat_history``.
    NOTE(review): despite the "cookie" name, this only lives in
    ``st.session_state`` — it does not survive a browser refresh.
    """
    serializable_history = []
    for message in history:
        serializable_history.append({
            "role": message.role,
            "text": message.parts[0].text,
        })
    st.session_state.cookie_chat_history = json.dumps(serializable_history)


def load_chat_history():
    """Rebuild a chat session from the JSON history stored in session state.

    Returns the reconstructed chat object, or ``None`` when no history is
    stored or reconstruction fails. The system prompt (if any) is re-sent
    first; stored messages are then replayed through ``send_message``,
    skipping model messages that start with the system prompt.
    NOTE(review): replaying every stored message (including model replies)
    via ``send_message`` triggers a new model response per replayed message;
    preserved as-is — confirm this is the intended restore strategy.
    """
    if 'cookie_chat_history' in st.session_state:
        try:
            history = json.loads(st.session_state.cookie_chat_history)
            model = load_gemini_pro()
            chat = model.start_chat(history=[])
            if st.session_state.system_prompt:
                chat.send_message(st.session_state.system_prompt)
            for message in history:
                if message["role"] != "model" or not message["text"].startswith(st.session_state.system_prompt):
                    chat.send_message(message["text"])
            return chat
        except Exception as e:
            st.error(f"Error cargando el historial: {e}")
    return None


# Initialize session state
if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")

with st.sidebar:
    selected = option_menu(
        "GD AI",
        ["System Prompt", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0,
    )

    # Button to clear the stored chat history and the live session
    if st.button("Borrar Historial"):
        if 'cookie_chat_history' in st.session_state:
            del st.session_state.cookie_chat_history
        if 'chat_session' in st.session_state:
            del st.session_state.chat_session
        st.success("Historial borrado!")


def translate_role_to_streamlit(user_role):
    """Map the Gemini role name to the Streamlit chat role ("model" -> "assistant")."""
    return "assistant" if user_role == "model" else user_role


if selected == "System Prompt":
    st.title("Configuración del System Prompt")

    new_system_prompt = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt)",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aquí las instrucciones que definirán el comportamiento del AI",
    )

    if st.button("Guardar System Prompt"):
        st.session_state.system_prompt = new_system_prompt
        st.session_state.cookie_system_prompt = new_system_prompt
        # Drop the live chat so the next session starts with the new prompt
        if "chat_session" in st.session_state:
            del st.session_state.chat_session
        st.success("System Prompt actualizado con éxito!")

    if st.session_state.system_prompt:
        st.markdown("### System Prompt Actual:")
        st.info(st.session_state.system_prompt)

elif selected == "Chatbot":
    model = load_gemini_pro()

    # Initialize or restore the chat session
    if "chat_session" not in st.session_state:
        loaded_chat = load_chat_history()
        if loaded_chat:
            st.session_state.chat_session = loaded_chat
        else:
            st.session_state.chat_session = model.start_chat(history=[])
            if st.session_state.system_prompt:
                st.session_state.chat_session.send_message(st.session_state.system_prompt)

    st.title("Gnosticdev Chatbot")

    if st.session_state.system_prompt:
        with st.expander("Ver System Prompt actual"):
            st.info(st.session_state.system_prompt)

    # Render the conversation so far
    for message in st.session_state.chat_session.history:
        with st.chat_message(translate_role_to_streamlit(message.role)):
            st.markdown(message.parts[0].text)

    # Input field
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
        st.chat_message("user").markdown(user_prompt)
        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)

        # Persist the updated history
        save_chat_history(st.session_state.chat_session.history)

elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")

    # NOTE(review): SOURCE was truncated mid-statement here. "png" completes
    # the visible type list; the remainder of this branch (image preview and
    # the call to gemini_pro_vision_responce) is missing from the provided
    # chunk and has not been reconstructed — restore it from the full file.
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])