import os
import json
import time

import streamlit as st
from streamlit_option_menu import option_menu
from PIL import Image

from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
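# The helpers imported from gemini_utility are assumed to work the way this app uses them:
#   - load_gemini_pro() returns a chat-capable Gemini model exposing start_chat(history=...)
#   - gemini_pro_vision_responce(prompt, image) returns the generated caption text for a PIL image
# (identifiers are kept exactly as gemini_utility defines them, including the "responce" spelling)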
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    layout="centered",
    initial_sidebar_state="expanded",
)
def translate_role_to_streamlit(role):
    """Translate Gemini message roles to Streamlit chat roles."""
    role_mapping = {
        'model': 'assistant',
        'user': 'user'
    }
    return role_mapping.get(role, 'assistant')
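# Example: translate_role_to_streamlit("model") -> "assistant"; unknown roles also fall back to "assistant".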
PROMPTS_FILE = "training_prompts.json"
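# The JSON file is expected to hold the same structure that load_prompts() falls back to:
# {"prompts": ["<system prompt>", ...], "current_prompt": "<active system prompt>"}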
def load_prompts():
    """Load saved system prompts from disk, falling back to an empty structure."""
    try:
        if os.path.exists(PROMPTS_FILE):
            with open(PROMPTS_FILE, 'r', encoding='utf-8') as file:
                return json.load(file)
    except Exception as e:
        st.error(f"Error cargando prompts: {e}")
    return {"prompts": [], "current_prompt": ""}


def save_prompts(prompts_data):
    """Persist the prompts dictionary to PROMPTS_FILE."""
    try:
        with open(PROMPTS_FILE, 'w', encoding='utf-8') as file:
            json.dump(prompts_data, file, ensure_ascii=False, indent=2)
    except Exception as e:
        st.error(f"Error guardando prompts: {e}")


# Keep the prompts in session state so they survive Streamlit reruns
if 'prompts_data' not in st.session_state:
    st.session_state.prompts_data = load_prompts()

with st.sidebar:
    selected = option_menu(
        "GD AI",
        ["System Prompts", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )
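# `selected` holds the menu label chosen in the sidebar and routes to one of the pages below.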
if selected == "System Prompts":
    st.title("Gestión de System Prompts")

    new_prompt = st.text_area(
        "Nuevo System Prompt",
        value="",
        height=200,
        help="Escribe aquí las nuevas instrucciones para el AI"
    )

    col1, col2 = st.columns([1, 2])
    with col1:
        if st.button("Añadir Nuevo Prompt"):
            if new_prompt and new_prompt not in st.session_state.prompts_data["prompts"]:
                st.session_state.prompts_data["prompts"].append(new_prompt)
                save_prompts(st.session_state.prompts_data)
                st.success("Nuevo prompt añadido!")
                time.sleep(1)
                st.rerun()

    st.markdown("### Prompts Guardados")
    for i, prompt in enumerate(st.session_state.prompts_data["prompts"]):
        with st.expander(f"Prompt {i+1}"):
            st.text_area("", prompt, height=100, key=f"prompt_{i}", disabled=True)
            col1, col2, col3 = st.columns([1, 1, 1])
            with col1:
                if st.button("Usar este prompt", key=f"use_{i}"):
                    st.session_state.prompts_data["current_prompt"] = prompt
                    save_prompts(st.session_state.prompts_data)
                    # Reset the chat so the newly selected prompt is applied on the next session
                    if "chat_session" in st.session_state:
                        del st.session_state.chat_session
                    st.success("Prompt activado!")
            with col2:
                if st.button("Editar", key=f"edit_{i}"):
                    st.session_state.editing_prompt = i
                    st.session_state.editing_text = prompt
            with col3:
                if st.button("Eliminar", key=f"delete_{i}"):
                    st.session_state.prompts_data["prompts"].pop(i)
                    save_prompts(st.session_state.prompts_data)
                    st.success("Prompt eliminado!")
                    time.sleep(1)
                    st.rerun()
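            # A minimal sketch completing the edit flow implied by the "Editar" button above
            # (an assumption: the original only stores editing_prompt / editing_text and never
            # reads them back). It simply overwrites the selected prompt with the edited text.
            if st.session_state.get("editing_prompt") == i:
                edited_text = st.text_area(
                    "Editar prompt",
                    value=st.session_state.get("editing_text", prompt),
                    height=150,
                    key=f"edit_area_{i}",
                )
                if st.button("Guardar cambios", key=f"save_edit_{i}"):
                    st.session_state.prompts_data["prompts"][i] = edited_text
                    save_prompts(st.session_state.prompts_data)
                    del st.session_state.editing_prompt
                    st.success("Prompt actualizado!")
                    time.sleep(1)
                    st.rerun()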
    st.markdown("### Prompt Actual")
    current_prompt = st.session_state.prompts_data.get("current_prompt", "")
    if current_prompt:
        st.info(current_prompt)
    else:
        st.warning("No hay prompt activo")

elif selected == "Chatbot":
    model = load_gemini_pro()

    if "chat_session" not in st.session_state:
        st.session_state.chat_session = model.start_chat(history=[])
        # Send the active system prompt as the first message of the new chat session
        current_prompt = st.session_state.prompts_data.get("current_prompt")
        if current_prompt:
            st.session_state.chat_session.send_message(current_prompt)
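            # Assuming the usual google.generativeai chat session, send_message() also records
            # this prompt and the model's reply in chat_session.history, so both will show up
            # in the replayed conversation below.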
    st.title("Gnosticdev Chatbot")

    current_prompt = st.session_state.prompts_data.get("current_prompt")
    if current_prompt:
        with st.expander("Ver System Prompt actual"):
            st.info(current_prompt)

    # Replay the conversation stored in the Gemini chat session
    if hasattr(st.session_state.chat_session, 'history'):
        for message in st.session_state.chat_session.history:
            role = translate_role_to_streamlit(message.role)
            with st.chat_message(role):
                st.markdown(message.parts[0].text)

    user_prompt = st.chat_input("Pregúntame algo...")
    if user_prompt:
        st.chat_message("user").markdown(user_prompt)
        try:
            gemini_response = st.session_state.chat_session.send_message(user_prompt)
            with st.chat_message("assistant"):
                st.markdown(gemini_response.text)
        except Exception as e:
            st.error(f"Error en la respuesta: {str(e)}")

elif selected == "Image Captioning":
    st.title("Image Caption Generation 📷")

    use_custom_prompt = st.checkbox("Usar prompt personalizado")
    if use_custom_prompt:
        custom_prompt = st.text_area(
            "Escribe tu prompt personalizado",
            value="Write a caption for this image",
            help="Puedes personalizar las instrucciones para el análisis de la imagen"
        )

    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
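    # st.file_uploader returns an UploadedFile (a file-like object) or None, so Image.open()
    # below can read the uploaded picture directly without writing it to disk first.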
    if upload_image:
        try:
            image = Image.open(upload_image)
            col1, col2 = st.columns(2)

            with col1:
                st.image(image, caption="Uploaded Image", use_column_width=True)
                image_info = f"Dimensiones: {image.size[0]}x{image.size[1]}"
                st.text(image_info)
            if st.button("Generate", key="generate_caption"):
                with st.spinner("Generando descripción..."):
                    prompt = custom_prompt if use_custom_prompt else "Write a caption for this image"
                    try:
                        caption = gemini_pro_vision_responce(prompt, image)
                        # Keep the caption in session state: pressing "Guardar descripción"
                        # triggers a rerun, and the nested-button layout of the original code
                        # lost the caption on that rerun, so the save code could never run.
                        st.session_state.last_caption = caption
                    except Exception as e:
                        st.error(f"Error al generar la descripción: {str(e)}")
                        st.error("Por favor, intenta con otra imagen o revisa tu conexión")

            if st.session_state.get("last_caption"):
                with col2:
                    st.success("¡Descripción generada!")
                    st.info(st.session_state.last_caption)

                if st.button("Guardar descripción"):
                    timestamp = time.strftime("%Y%m%d-%H%M%S")
                    filename = f"caption_{timestamp}.txt"
                    try:
                        with open(filename, "w", encoding="utf-8") as f:
                            f.write(st.session_state.last_caption)
                        st.success(f"Descripción guardada en {filename}")
                    except Exception as e:
                        st.error(f"Error al guardar la descripción: {str(e)}")
        except Exception as e:
            st.error(f"Error al procesar la imagen: {str(e)}")
            st.error("Por favor, asegúrate de que el archivo es una imagen válida")


st.markdown("---")
col1, col2, col3 = st.columns(3)
with col1:
    st.markdown("**GnosticDev AI**")
with col2:
    st.markdown("Versión 1.0.0")
with col3:
    st.markdown("Made with ❤️ by GnosticDev")