Update app.py
app.py
CHANGED

@@ -1,12 +1,29 @@
import os
import streamlit as st
import json
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image
-import time

-#
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",

@@ -14,203 +31,144 @@ st.set_page_config(
    initial_sidebar_state="expanded",
)

-#
-def translate_role_to_streamlit(role):
-    """
-    Translate Gemini message roles to Streamlit chat roles
-    """
-    role_mapping = {
-        'model': 'assistant',
-        'user': 'user'
-    }
-    return role_mapping.get(role, 'assistant')
-
-# File for permanent storage
-PROMPTS_FILE = "training_prompts.json"
-
-# Functions to manage the permanent storage
-def load_prompts():
-    try:
-        if os.path.exists(PROMPTS_FILE):
-            with open(PROMPTS_FILE, 'r', encoding='utf-8') as file:
-                return json.load(file)
-    except Exception as e:
-        st.error(f"Error cargando prompts: {e}")
-    return {"prompts": [], "current_prompt": ""}

-
-
-
-
-
-

-#
-
-st.session_state

with st.sidebar:
    selected = option_menu(
        "GD AI",
-        ["System
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )

-if
-
-
-
-
-    "
-
-
-
-
-
-    col1, col2 = st.columns([1, 2])
-    with col1:
-        if st.button("Añadir Nuevo Prompt"):
-            if new_prompt and new_prompt not in st.session_state.prompts_data["prompts"]:
-                st.session_state.prompts_data["prompts"].append(new_prompt)
-                save_prompts(st.session_state.prompts_data)
-                st.success("Nuevo prompt añadido!")
-                time.sleep(1)
-                st.rerun()
-
-    # List of saved prompts
-    st.markdown("### Prompts Guardados")
-    for i, prompt in enumerate(st.session_state.prompts_data["prompts"]):
-        with st.expander(f"Prompt {i+1}"):
-            st.text_area("", prompt, height=100, key=f"prompt_{i}", disabled=True)
-            col1, col2, col3 = st.columns([1, 1, 1])
-            with col1:
-                if st.button("Usar este prompt", key=f"use_{i}"):
-                    st.session_state.prompts_data["current_prompt"] = prompt
-                    save_prompts(st.session_state.prompts_data)
-                    if "chat_session" in st.session_state:
-                        del st.session_state.chat_session
-                    st.success("Prompt activado!")
-            with col2:
-                if st.button("Editar", key=f"edit_{i}"):
-                    st.session_state.editing_prompt = i
-                    st.session_state.editing_text = prompt
-            with col3:
-                if st.button("Eliminar", key=f"delete_{i}"):
-                    st.session_state.prompts_data["prompts"].pop(i)
-                    save_prompts(st.session_state.prompts_data)
-                    st.success("Prompt eliminado!")
-                    time.sleep(1)
-                    st.rerun()
-
-    # Show the current prompt
-    st.markdown("### Prompt Actual")
-    current_prompt = st.session_state.prompts_data.get("current_prompt", "")
-    if current_prompt:
-        st.info(current_prompt)
    else:
-

elif selected == "Chatbot":
    model = load_gemini_pro()
-
-    # Initialize chat session with current prompt
    if "chat_session" not in st.session_state:
-
-
-
-
-
-
-
-
-            with st.chat_message("assistant"):
-                st.markdown(welcome_message)
-        except Exception as e:
-            st.error(f"Error al mostrar mensaje de bienvenida: {str(e)}")

    st.title("Gnosticdev Chatbot")
-
-    # Display chat history
-    if hasattr(st.session_state.chat_session, 'history'):
-        for message in st.session_state.chat_session.history:
-            # Skip displaying the system prompt message
-            if message.role == 'model' and message.parts[0].text == st.session_state.prompts_data.get("current_prompt"):
-                continue
-            role = translate_role_to_streamlit(message.role)
-            with st.chat_message(role):
-                st.markdown(message.parts[0].text)
-
-    # Chat input
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
-
-
-
-
-
-
-        st.error(f"Error en la respuesta: {str(e)}")

elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")
-
-    # Add custom prompt option
-    use_custom_prompt = st.checkbox("Usar prompt personalizado")
-    if use_custom_prompt:
-        custom_prompt = st.text_area(
-            "Escribe tu prompt personalizado",
-            value="Write a caption for this image",
-            help="Puedes personalizar las instrucciones para el análisis de la imagen"
-        )
-
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
-
-
-
-
-
-
-
-
-
-        st.text(image_info)
-
-        if st.button("Generate", key="generate_caption"):
-            with st.spinner("Generando descripción..."):
-                prompt = custom_prompt if use_custom_prompt else "Write a caption for this image"
-                try:
-                    caption = gemini_pro_vision_responce(prompt, image)
-                    with col2:
-                        st.success("¡Descripción generada!")
-                        st.info(caption)
-
-                    # Option to save the caption
-                    if st.button("Guardar descripción"):
-                        timestamp = time.strftime("%Y%m%d-%H%M%S")
-                        filename = f"caption_{timestamp}.txt"
-                        try:
-                            with open(filename, "w", encoding="utf-8") as f:
-                                f.write(caption)
-                            st.success(f"Descripción guardada en {filename}")
-                        except Exception as e:
-                            st.error(f"Error al guardar la descripción: {str(e)}")
-
-                except Exception as e:
-                    st.error(f"Error al generar la descripción: {str(e)}")
-                    st.error("Por favor, intenta con otra imagen o revisa tu conexión")
-
-    except Exception as e:
-        st.error(f"Error al procesar la imagen: {str(e)}")
-        st.error("Por favor, asegúrate de que el archivo es una imagen válida")
-
-# Add footer
-st.markdown("---")
-col1, col2, col3 = st.columns(3)
-with col1:
-    st.markdown("**GnosticDev AI**")
-with col2:
-    st.markdown("Versión 1.0.0")
-with col3:
-    st.markdown("Made with ❤️ by GnosticDev")
app.py (after):

import os
import streamlit as st
import json
+import requests
+import re
+from bs4 import BeautifulSoup
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image

+# Path used to persist the System Prompt
+SYSTEM_PROMPT_FILE = "system_prompt.json"
+
+# Load the System Prompt from the file
+def load_system_prompt():
+    if os.path.exists(SYSTEM_PROMPT_FILE):
+        with open(SYSTEM_PROMPT_FILE, 'r') as f:
+            return json.load(f).get("system_prompt", "")
+    return ""
+
+# Save the System Prompt to the file
+def save_system_prompt(system_prompt):
+    with open(SYSTEM_PROMPT_FILE, 'w') as f:
+        json.dump({"system_prompt": system_prompt}, f)
+
+# Setting the page config
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    initial_sidebar_state="expanded",
)

+# Load the saved System Prompt
+if "system_prompt" not in st.session_state:
+    st.session_state.system_prompt = load_system_prompt()

+# Save the chat history in "cookies" (Streamlit session state)
+def save_chat_history(history):
+    serializable_history = []
+    for message in history:
+        serializable_history.append({
+            "role": message.role,
+            "text": message.parts[0].text
+        })
+    st.session_state.cookie_chat_history = json.dumps(serializable_history)

+# Load the chat history from "cookies"
+def load_chat_history():
+    if 'cookie_chat_history' in st.session_state:
+        try:
+            history = json.loads(st.session_state.cookie_chat_history)
+            model = load_gemini_pro()
+            chat = model.start_chat(history=[])
+            # Do NOT send the system_prompt at the start here
+            for message in history:
+                if message["role"] != "model":
+                    chat.send_message(message["text"])
+            return chat
+        except Exception as e:
+            st.error(f"Error cargando el historial: {e}")
+    return None

with st.sidebar:
    selected = option_menu(
        "GD AI",
+        ["System Prompt", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )

+    if st.button("Borrar Historial"):
+        if 'cookie_chat_history' in st.session_state:
+            del st.session_state.cookie_chat_history
+        if 'chat_session' in st.session_state:
+            del st.session_state.chat_session
+        st.success("Historial borrado!")
+
+def translate_role_to_streamlit(user_role):
+    if user_role == "model":
+        return "assistant"
    else:
+        return user_role
+
+def extract_urls(text):
+    url_pattern = r"(https?://\S+)"
+    urls = re.findall(url_pattern, text)
+    return urls
+
+def fetch_url_content(url):
+    try:
+        response = requests.get(url, timeout=10)  # Timeout to avoid hanging
+        response.raise_for_status()
+        return response.text
+    except requests.exceptions.RequestException as e:
+        return f"Error al acceder a la URL '{url}': {e}"
+
+def process_url_content(content):
+    try:
+        soup = BeautifulSoup(content, "html.parser")
+        # Extract only the main body text, ignoring script and style tags
+        text = soup.get_text(" ", strip=True)
+        return text
+    except Exception as e:
+        return f"Error al procesar el contenido HTML: {e}"
+
+def process_urls_in_prompt(prompt):
+    urls = extract_urls(prompt)
+    new_prompt = prompt
+    for url in urls:
+        content = fetch_url_content(url)
+        if content.startswith("Error"):  # Error handling when fetching the URL content
+            new_prompt = new_prompt.replace(url, content)
+        else:
+            processed_content = process_url_content(content)
+            new_prompt = new_prompt.replace(url, processed_content)
+    return new_prompt
+
+if selected == "System Prompt":
+    st.title("Configuración del System Prompt")
+    new_system_prompt = st.text_area(
+        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
+        value=st.session_state.system_prompt,
+        height=300,
+        help="Escribe aquí las instrucciones que definirán el comportamiento del AI. Puedes incluir URLs."
+    )
+
+    if st.button("Guardar System Prompt"):
+        processed_prompt = process_urls_in_prompt(new_system_prompt)
+        st.session_state.system_prompt = processed_prompt
+        save_system_prompt(processed_prompt)  # Persist to the JSON file
+        if "chat_session" in st.session_state:
+            del st.session_state.chat_session
+        st.success("System Prompt actualizado con éxito!")
+
+    if st.session_state.system_prompt:
+        st.markdown("### System Prompt Actual:")
+        st.info(st.session_state.system_prompt)

elif selected == "Chatbot":
    model = load_gemini_pro()
    if "chat_session" not in st.session_state:
+        loaded_chat = load_chat_history()
+        if loaded_chat:
+            st.session_state.chat_session = loaded_chat
+        else:
+            # Start the chat with a simple greeting / welcome message
+            st.session_state.chat_session = model.start_chat(history=[])
+            st.chat_message("assistant").markdown("¡Hola! ¿En qué puedo ayudarte hoy?")
+            # Only the behavior is configured via the System Prompt; it is not shown at startup

    st.title("Gnosticdev Chatbot")
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
+        processed_user_prompt = process_urls_in_prompt(user_prompt)
+        st.chat_message("user").markdown(processed_user_prompt)
+        gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
+        with st.chat_message("assistant"):
+            st.markdown(gemini_response.text)
+        save_chat_history(st.session_state.chat_session.history)

elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
+    if upload_image and st.button("Generate"):
+        image = Image.open(upload_image)
+        col1, col2 = st.columns(2)
+        with col1:
+            st.image(image, caption="Uploaded Image", use_column_width=True)
+        default_prompt = "Write a caption for this image"
+        caption = gemini_pro_vision_responce(default_prompt, image)
+        with col2:
+            st.info(caption)
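A note on the import at the top: load_gemini_pro and gemini_pro_vision_responce come from gemini_utility, which is not part of this commit. A minimal sketch of what that module might look like, assuming the google-generativeai SDK and a GOOGLE_API_KEY environment variable (only the two function names come from the diff; everything else here is an assumption):

# Hypothetical gemini_utility.py -- a sketch, not the repository's actual module.
import os
import google.generativeai as genai  # assumed SDK

# Assumed key source; the real module may read it from a config file instead.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

def load_gemini_pro():
    # Text model used by the Chatbot section; supports start_chat()/send_message()
    return genai.GenerativeModel("gemini-pro")

def gemini_pro_vision_responce(prompt, image):
    # Vision model used by Image Captioning: takes a text prompt plus a
    # PIL image and returns the generated caption text.
    model = genai.GenerativeModel("gemini-pro-vision")
    response = model.generate_content([prompt, image])
    return response.text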
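The commit persists the System Prompt to system_prompt.json, but as written it is never actually sent to the model: load_chat_history deliberately skips it, and a fresh session starts with an empty history. If the prompt is meant to shape the model's behavior, it has to reach the model somewhere; one option, sketched as an assumption (not the author's code) using the role/parts history format of the google-generativeai SDK:

# Hypothetical helper: seed a brand-new chat with the stored System Prompt.
def start_chat_with_system_prompt(model):
    system_prompt = st.session_state.get("system_prompt", "")
    history = []
    if system_prompt:
        # This SDK exposes no dedicated system role here, so the prompt is
        # planted as a first user turn with a canned model acknowledgement.
        history = [
            {"role": "user", "parts": [system_prompt]},
            {"role": "model", "parts": ["Entendido."]},
        ]
    return model.start_chat(history=history)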
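load_chat_history also rebuilds the session by re-sending every stored user message through chat.send_message, so each page reload costs one model call per past turn and regenerates answers that may differ from the ones originally shown. If the goal is only to restore the transcript, the serialized messages can be handed straight to start_chat; a sketch assuming the SDK accepts role/parts dicts as history:

# Hypothetical variant: restore the conversation without re-querying the model.
def load_chat_history_offline():
    if "cookie_chat_history" not in st.session_state:
        return None
    raw = json.loads(st.session_state.cookie_chat_history)
    model = load_gemini_pro()
    # Rebuild the SDK history structure from the serialized role/text pairs.
    history = [{"role": m["role"], "parts": [m["text"]]} for m in raw]
    return model.start_chat(history=history)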
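Finally, note that process_urls_in_prompt replaces each URL in the user's text with the full extracted page content (or with the fetch error message), so one pasted link can silently expand the prompt by thousands of characters. A quick way to see the effect, using a placeholder URL:

# Example run; https://example.com is a placeholder, any reachable page works.
sample = "Resume esta pagina: https://example.com"
expanded = process_urls_in_prompt(sample)
print(len(sample), "->", len(expanded))  # prompt length before/after expansion
print(expanded[:200])                    # the page text now sits where the URL was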