import os
import streamlit as st
import json
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image

# Page configuration
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    layout="centered",
    initial_sidebar_state="expanded",
)

# Save the chat history in session state (used here in place of cookies)
def save_chat_history(history):
    serializable_history = []
    for message in history:
        serializable_history.append({
            "role": message.role,
            "text": message.parts[0].text
        })
    st.session_state.cookie_chat_history = json.dumps(serializable_history)

# Load the chat history from session state and replay it into a new chat session
def load_chat_history():
    if 'cookie_chat_history' in st.session_state:
        try:
            history = json.loads(st.session_state.cookie_chat_history)
            model = load_gemini_pro()
            chat = model.start_chat(history=[])
            if st.session_state.system_prompt:
                chat.send_message(st.session_state.system_prompt)
            for message in history:
                # Skip the stored system prompt so it is not sent twice
                if message["role"] != "model" or not message["text"].startswith(st.session_state.system_prompt):
                    chat.send_message(message["text"])
            return chat
        except Exception as e:
            st.error(f"Error cargando el historial: {e}")
    return None

# Build a plain-text version of the chat history for download
def download_chat_history(history):
    chat_text = ""
    for message in history:
        chat_text += f"{message.role}: {message.parts[0].text}\n"
    return chat_text
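
# The helper below is not part of the original app: it sketches one way the
# saved reference URLs could be downloaded so their text can be passed to the
# model as context. The name fetch_url_text, the 4000-character cap and the
# use of the `requests` package (which would need to be installed) are assumptions.
import requests

def fetch_url_text(url, max_chars=4000):
    """Hypothetical helper: fetch a URL and return a truncated text snippet."""
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        # Keep only the beginning of the body so the prompt stays small
        return response.text[:max_chars]
    except Exception as e:
        return f"[Could not read {url}: {e}]"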

# Initialize session state
if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
if "cookie_urls" not in st.session_state:
    st.session_state.cookie_urls = []

with st.sidebar:
    selected = option_menu(
        "GD AI",
        ["System Prompt", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )

    # Button to clear the stored history and the active chat session
    if st.button("Borrar Historial"):
        if 'cookie_chat_history' in st.session_state:
            del st.session_state.cookie_chat_history
        if 'chat_session' in st.session_state:
            del st.session_state.chat_session
        st.success("Historial borrado!")

def translate_role_to_streamlit(user_role):
    return "assistant" if user_role == "model" else user_role

# Application logic starts here
if selected == "System Prompt":
    st.title("Configuración del System Prompt")
    new_system_prompt = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt)",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aquí las instrucciones que definirán el comportamiento del AI"
    )
    # Field for entering reference URLs
    urls_input = st.text_area(
        "Ingresa URLs de información y documentos (separadas por comas)",
        value=", ".join(st.session_state.cookie_urls),
        height=100,
        help="Escribe aquí las URLs que el AI puede usar como referencia, separadas por comas."
    )
    if st.button("Guardar System Prompt y URLs"):
        st.session_state.system_prompt = new_system_prompt
        st.session_state.cookie_system_prompt = new_system_prompt
        # Store the URLs as a list
        st.session_state.cookie_urls = [url.strip() for url in urls_input.split(",") if url.strip()]
        if "chat_session" in st.session_state:
            del st.session_state.chat_session
        st.success("System Prompt y URLs actualizados con éxito!")

    if st.session_state.system_prompt:
        st.markdown("### System Prompt Actual:")
        st.info(st.session_state.system_prompt)
    if st.session_state.cookie_urls:
        st.markdown("### URLs Guardadas:")
        st.info(", ".join(st.session_state.cookie_urls))
elif selected == "Chatbot":
model = load_gemini_pro()
# Inicializar o cargar sesi贸n de chat
if "chat_session" not in st.session_state:
loaded_chat = load_chat_history()
if loaded_chat:
st.session_state.chat_session = loaded_chat
else:
st.session_state.chat_session = model.start_chat(history=[])
if st.session_state.system_prompt:
st.session_state.chat_session.send_message(st.session_state.system_prompt)
st.title("Gnosticdev Chatbot")
if st.session_state.system_prompt:
with st.expander("Ver System Prompt actual"):
st.info(st.session_state.system_prompt)
# Mostrar historial
for message in st.session_state.chat_session.history:
with st.chat_message(translate_role_to_streamlit(message.role)):
st.markdown(message.parts[0].text)
# Campo de entrada
user_prompt = st.chat_input("Preguntame algo...")
if user_prompt:
st.chat_message("user").markdown(user_prompt)

        # This is where the logic can be extended so the bot consults the saved URLs
        urls = st.session_state.get('cookie_urls', [])
        if urls:
            # Placeholder: the real logic for querying the URLs still has to be
            # implemented, e.g. an HTTP request per URL (requests.get(url)) whose
            # content is then parsed; a hypothetical fetch_url_text sketch is
            # defined above.
            for url in urls:
                pass
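
            # Hypothetical wiring (assumption, left commented out so the app's
            # behaviour is unchanged): prepend the fetched snippets to the
            # user's question before it is sent to Gemini.
            #   context = "\n\n".join(fetch_url_text(u) for u in urls)
            #   user_prompt = f"Contexto:\n{context}\n\nPregunta: {user_prompt}"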

        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)

        # Save the updated history
        save_chat_history(st.session_state.chat_session.history)

    # Option to download the chat history
    if st.button("Descargar Historial del Chat"):
        chat_history = download_chat_history(st.session_state.chat_session.history)
        st.download_button(
            label="Descargar",
            data=chat_history,
            file_name="historial_chat.txt",
            mime="text/plain"
        )
elif selected == "Image Captioning":
st.title("Image Caption Generation馃摳")
upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
if upload_image and st.button("Generate"):
image = Image.open(upload_image)
col1, col2 = st.columns(2)
with col1:
st.image(image, caption="Uploaded Image", use_column_width=True)
default_prompt = "Write a caption for this image"
caption = gemini_pro_vision_responce(default_prompt, image)
with col2:
st.info(caption)

# End of script