import os
import json
import re

import requests
import streamlit as st
from bs4 import BeautifulSoup
from PIL import Image
from streamlit_option_menu import option_menu

from gemini_utility import load_gemini_pro, gemini_pro_vision_responce

# Page configuration — must be the first Streamlit call in the script.
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    layout="centered",
    initial_sidebar_state="expanded",
)

CONFIG_FILE = "config.json"


def load_config():
    """Load persisted settings from CONFIG_FILE, returning {} on any failure."""
    if os.path.exists(CONFIG_FILE):
        try:
            # Explicit UTF-8 so accented prompt text round-trips correctly
            # regardless of the OS locale (this was the source of mojibake).
            with open(CONFIG_FILE, "r", encoding="utf-8") as file:
                return json.load(file)
        except Exception as e:
            st.error(f"Error cargando configuraciones: {e}")
    return {}


def save_config(config):
    """Persist settings to CONFIG_FILE as pretty-printed UTF-8 JSON."""
    try:
        with open(CONFIG_FILE, "w", encoding="utf-8") as file:
            # ensure_ascii=False keeps accented characters readable on disk.
            json.dump(config, file, indent=4, ensure_ascii=False)
    except Exception as e:
        st.error(f"Error guardando configuraciones: {e}")


# Load settings once at startup and seed the session state.
config = load_config()
if "system_prompt" not in config:
    config["system_prompt"] = ""

if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = config["system_prompt"]


def save_chat_history(history):
    """Serialize the Gemini chat history into session state as JSON.

    Each entry keeps only the role and the text of the first part, which is
    all the UI renders.
    """
    serializable_history = [
        {"role": message.role, "text": message.parts[0].text}
        for message in history
    ]
    st.session_state.cookie_chat_history = json.dumps(serializable_history)


def load_chat_history():
    """Rebuild a chat session from the JSON history stored in session state.

    Returns:
        The restored chat session, or None when no history exists or
        restoring fails.
    """
    if "cookie_chat_history" not in st.session_state:
        return None
    try:
        saved = json.loads(st.session_state.cookie_chat_history)
        model = load_gemini_pro()
        # BUGFIX: the previous version replayed every saved message through
        # chat.send_message(), which issued a live API call (and generated a
        # brand-new model reply) per saved turn, and sent saved *model* turns
        # as if they were user input. Passing the saved turns directly to
        # start_chat() restores the conversation with zero API traffic.
        restored = [
            {"role": message["role"], "parts": [message["text"]]}
            for message in saved
        ]
        return model.start_chat(history=restored)
    except Exception as e:
        st.error(f"Error cargando el historial: {e}")
        return None


with st.sidebar:
    selected = option_menu(
        "GD AI",
        ["System Prompt", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=["gear", "chat-dots-fill", "image-fill"],
        default_index=0,
    )

    if st.button("Borrar Historial"):
        # Drop both the serialized history and the live chat session.
        st.session_state.pop("cookie_chat_history", None)
        st.session_state.pop("chat_session", None)
        st.success("Historial borrado!")


def translate_role_to_streamlit(user_role):
    """Map the Gemini role name ("model") to the one st.chat_message expects."""
    return "assistant" if user_role == "model" else user_role


def extract_urls(text):
    """Return every http(s) URL found in *text*."""
    return re.findall(r"(https?://\S+)", text)


def fetch_url_content(url):
    """Download *url* and return its body, or an error string on failure."""
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return response.text
    except requests.exceptions.RequestException as e:
        return f"Error al acceder a la URL '{url}': {e}"


def process_url_content(content):
    """Strip HTML markup and return the page's visible text."""
    try:
        soup = BeautifulSoup(content, "html.parser")
        return soup.get_text(" ", strip=True)
    except Exception as e:
        return f"Error al procesar el contenido HTML: {e}"


def process_urls_in_prompt(prompt):
    """Replace each URL in *prompt* with the fetched page text.

    Fetch failures are substituted inline as error strings so the model (and
    the user) can see what went wrong.
    """
    new_prompt = prompt
    for url in extract_urls(prompt):
        content = fetch_url_content(url)
        if content.startswith("Error"):
            new_prompt = new_prompt.replace(url, content)
        else:
            new_prompt = new_prompt.replace(url, process_url_content(content))
    return new_prompt


if selected == "System Prompt":
    st.title("Configuración del System Prompt")
    new_system_prompt = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aquí las instrucciones que definirán el comportamiento del AI. Puedes incluir URLs.",
    )
    if st.button("Guardar System Prompt"):
        # URLs in the prompt are expanded to page text before persisting.
        processed_prompt = process_urls_in_prompt(new_system_prompt)
        st.session_state.system_prompt = processed_prompt
        config["system_prompt"] = processed_prompt
        save_config(config)
        # Discard any running chat so the new prompt takes effect next turn.
        if "chat_session" in st.session_state:
            del st.session_state.chat_session
        st.success("System Prompt actualizado con éxito!")

elif selected == "Chatbot":
    model = load_gemini_pro()
    if "chat_session" not in st.session_state:
        loaded_chat = load_chat_history()
        if loaded_chat:
            st.session_state.chat_session = loaded_chat
        else:
            # Fresh chat session; the system prompt is not injected as a
            # visible opening message.
            st.session_state.chat_session = model.start_chat(history=[])

    st.title("Gnosticdev Chatbot")

    # Render the conversation so far, if any.
    for message in st.session_state.chat_session.history:
        with st.chat_message(translate_role_to_streamlit(message.role)):
            st.markdown(message.parts[0].text)

    # User input.
    user_prompt = st.chat_input("Pregúntame algo...")
    if user_prompt:
        processed_user_prompt = process_urls_in_prompt(user_prompt)
        st.chat_message("user").markdown(processed_user_prompt)
        gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
        save_chat_history(st.session_state.chat_session.history)

elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")
    upload_image = st.file_uploader("Sube una imagen...", type=["jpg", "jpeg", "png"])
    if upload_image and st.button("Generar"):
        try:
            # Load and display the uploaded image.
            image = Image.open(upload_image)
            col1, col2 = st.columns(2)
            with col1:
                # use_container_width replaces the deprecated use_column_width.
                st.image(image, caption="Imagen subida", use_container_width=True)

            # Generate a caption with the vision model.
            default_prompt = "Escribe un subtítulo para esta imagen"
            caption = gemini_pro_vision_responce(default_prompt, image)

            with col2:
                st.info(caption)
        except Exception as e:
            st.error(f"Error procesando la imagen: {e}")

# Fallback message when no section matches the selection.
else:
    st.write("Selecciona una opción en el menú para comenzar.")