# NOTE: the six lines below are Hugging Face web-UI residue captured when this
# file was copied from the site; commented out so the module parses.
# gnosticdev's picture
# Update app.py
# 59bfaac verified
# raw / history blame
# 7.03 kB
import streamlit as st
import json
import requests
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image
# Configuraci贸n de la p谩gina
st.set_page_config(
page_title="GnosticDev AI",
page_icon="馃",
layout="wide", # Aseg煤rate de que el layout sea 'wide'
initial_sidebar_state="expanded",
)
# Men煤 de opciones en el lateral izquierdo
selected = option_menu(
menu_title="Men煤",
options=["System Prompt", "Chatbot", "Image Captioning"],
icons=["gear", "chat", "camera"],
default_index=0,
orientation="vertical" # Aseg煤rate de que est茅 en vertical
)
# Seed per-session state with defaults on the first run only
_DEFAULTS = (
    ("cookie_chat_history", json.dumps([])),  # serialized chat history (JSON string)
    ("cookie_urls", []),                      # reference URLs supplied by the user
    ("system_prompt", ""),                    # current system prompt text
)
for _key, _default in _DEFAULTS:
    if _key not in st.session_state:
        st.session_state[_key] = _default
# Persist the chat history into session state as JSON
def save_chat_history(history):
    """Serialise the Gemini chat *history* and store it in session state.

    Each message is reduced to a plain dict of its role and first text part
    so the whole history round-trips through JSON.
    """
    entries = [
        {"role": msg.role, "text": msg.parts[0].text}
        for msg in history
    ]
    st.session_state.cookie_chat_history = json.dumps(entries)
# Rebuild a chat session from the JSON history kept in session state
def load_chat_history():
    """Reconstruct a Gemini chat session from the stored JSON history.

    Starts a fresh chat, re-sends the system prompt when one is set, then
    replays the stored messages, skipping only the model's echo of the
    system prompt so it is not duplicated.

    Returns:
        The reconstructed chat session, or None when no history is stored
        or reconstruction fails (the error is shown via st.error).
    """
    if 'cookie_chat_history' not in st.session_state:
        return None
    try:
        history = json.loads(st.session_state.cookie_chat_history)
        model = load_gemini_pro()
        chat = model.start_chat(history=[])
        system_prompt = st.session_state.system_prompt
        if system_prompt:
            chat.send_message(system_prompt)
        for message in history:
            # BUG FIX: the original tested startswith(system_prompt) even when
            # the prompt was "" — and every string startswith("") — so ALL
            # model messages were silently dropped from the replay. Only treat
            # a message as the prompt echo when a non-empty prompt exists.
            is_prompt_echo = (
                message["role"] == "model"
                and bool(system_prompt)
                and message["text"].startswith(system_prompt)
            )
            if not is_prompt_echo:
                chat.send_message(message["text"])
        return chat
    except Exception as e:
        st.error(f"Error cargando el historial: {e}")
        return None
# Flatten the chat history into downloadable plain text
def download_chat_history(history):
    """Return *history* as plain text, one "role: text" line per message."""
    lines = [f"{msg.role}: {msg.parts[0].text}\n" for msg in history]
    return "".join(lines)
# Download the content behind a user-supplied reference URL
def fetch_url_content(url):
    """Fetch *url* and return its body text, or None on any request error.

    A timeout is set because ``requests.get`` has no default timeout — one
    unresponsive host would otherwise hang the Streamlit script forever.
    Errors are reported to the UI via ``st.error``.
    """
    try:
        response = requests.get(url, timeout=10)
        # Turn HTTP error statuses (4xx/5xx) into exceptions
        response.raise_for_status()
        return response.text
    except requests.RequestException as e:
        st.error(f"Error al acceder a {url}: {e}")
        return None
if selected == "System Prompt":
    st.title("Configuraci贸n del System Prompt")

    # Editable system-prompt text
    new_system_prompt = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt)",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aqu铆 las instrucciones que definir谩n el comportamiento del AI",
    )

    # Comma-separated list of reference URLs
    urls_input = st.text_area(
        "Ingresa URLs de informaci贸n y documentos (separadas por comas)",
        value=", ".join(st.session_state.cookie_urls),
        height=100,
        help="Escribe aqu铆 las URLs que el AI puede usar como referencia, separadas por comas.",
    )

    if st.button("Guardar System Prompt y URLs"):
        st.session_state.system_prompt = new_system_prompt
        # Split on commas, trimming whitespace and dropping empty entries
        st.session_state.cookie_urls = [
            u.strip() for u in urls_input.split(",") if u.strip()
        ]
        # Discard any live chat so the new prompt takes effect next time
        st.session_state.pop("chat_session", None)
        st.success("System Prompt y URLs actualizados con 茅xito!")

    if st.session_state.system_prompt:
        st.markdown("### System Prompt Actual:")
        st.info(st.session_state.system_prompt)

    if st.session_state.cookie_urls:
        st.markdown("### URLs Guardadas:")
        st.info(", ".join(st.session_state.cookie_urls))
elif selected == "Chatbot":
model = load_gemini_pro()
# Inicializar o cargar sesi贸n de chat
if "chat_session" not in st.session_state:
loaded_chat = load_chat_history()
if loaded_chat:
st.session_state.chat_session = loaded_chat
else:
st.session_state.chat_session = model.start_chat(history=[])
if st.session_state.system_prompt:
st.session_state.chat_session.send_message(st.session_state.system_prompt)
st.title("Gnosticdev Chatbot")
if st.session_state.system_prompt:
with st.expander("Ver System Prompt actual"):
st.info(st.session_state.system_prompt)
# Mostrar historial
for message in st.session_state.chat_session.history:
with st.chat_message(translate_role_to_streamlit(message.role)):
st.markdown(message.parts[0].text)
# Campo de entrada
user_prompt = st.chat_input("Preguntame algo...")
if user_prompt:
st.chat_message("user").markdown(user_prompt)
# Obtener las URLs guardadas
urls = st.session_state.get('cookie_urls', [])
fetched_contents = []
if urls:
# L贸gica para consultar las URLs y obtener informaci贸n
for url in urls:
content = fetch_url_content(url)
if content:
fetched_contents.append(content)
# Aqu铆 puedes procesar el contenido obtenido de las URLs
# Por ejemplo, podr铆as resumirlo o extraer informaci贸n relevante
combined_content = "\n\n".join(fetched_contents)
user_prompt += f"\n\nInformaci贸n adicional de URLs:\n{combined_content}"
# Enviar el mensaje del usuario al modelo
gemini_response = st.session_state.chat_session.send_message(user_prompt)
with st.chat_message("assistant"):
st.markdown(gemini_response.text)
# Guardar historial actualizado
save_chat_history(st.session_state.chat_session.history)
# Opci贸n para descargar el historial del chat
if st.button("Descargar Historial del Chat"):
chat_history = download_chat_history(st.session_state.chat_session.history)
st.download_button(
label="Descargar",
data=chat_history,
file_name="historial_chat.txt",
mime="text/plain"
)
elif selected == "Image Captioning":
st.title("Image Caption Generation馃摳")
upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
if upload_image and st.button("Generate"):
image = Image.open(upload_image)
col1, col2 = st.columns(2)
with col1:
st.image(image, caption="Uploaded Image", use_column_width=True)
default_prompt = "Write a caption for this image"
caption = gemini_pro_vision_responce(default_prompt, image)
with col2:
st.info(caption)