gnosticdev commited on
Commit
ea092fd
verified
1 Parent(s): 4b02bb7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -27
app.py CHANGED
@@ -1,8 +1,11 @@
1
  import os
2
  import streamlit as st
3
  import json
 
 
 
4
  from streamlit_option_menu import option_menu
5
- from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
6
  from PIL import Image
7
 
8
  # Setting the page config
@@ -13,26 +16,23 @@ st.set_page_config(
13
  initial_sidebar_state="expanded",
14
  )
15
 
16
- # Función para guardar el historial en cookies
17
  def save_chat_history(history):
18
- # Convertir el historial a un formato serializable
19
  serializable_history = []
20
  for message in history:
21
  serializable_history.append({
22
  "role": message.role,
23
  "text": message.parts[0].text
24
  })
25
- # Guardar en cookie
26
  st.session_state.cookie_chat_history = json.dumps(serializable_history)
27
 
28
- # Función para cargar el historial desde cookies
29
  def load_chat_history():
30
  if 'cookie_chat_history' in st.session_state:
31
  try:
32
  history = json.loads(st.session_state.cookie_chat_history)
33
  model = load_gemini_pro()
34
  chat = model.start_chat(history=[])
35
- # Reconstruir el historial
36
  if st.session_state.system_prompt:
37
  chat.send_message(st.session_state.system_prompt)
38
  for message in history:
@@ -43,7 +43,7 @@ def load_chat_history():
43
  st.error(f"Error cargando el historial: {e}")
44
  return None
45
 
46
- # Inicializar estados
47
  if "system_prompt" not in st.session_state:
48
  st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
49
 
@@ -55,8 +55,7 @@ with st.sidebar:
55
  icons=['gear', 'chat-dots-fill', 'image-fill'],
56
  default_index=0
57
  )
58
-
59
- # Botón para borrar historial
60
  if st.button("Borrar Historial"):
61
  if 'cookie_chat_history' in st.session_state:
62
  del st.session_state.cookie_chat_history
@@ -70,31 +69,66 @@ def translate_role_to_streamlit(user_role):
70
  else:
71
  return user_role
72
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
  if selected == "System Prompt":
74
  st.title("Configuraci贸n del System Prompt")
75
-
76
  new_system_prompt = st.text_area(
77
- "Ingresa las instrucciones para el AI (System Prompt)",
78
  value=st.session_state.system_prompt,
79
  height=300,
80
- help="Escribe aqu铆 las instrucciones que definir谩n el comportamiento del AI"
81
  )
82
-
83
  if st.button("Guardar System Prompt"):
84
- st.session_state.system_prompt = new_system_prompt
85
- st.session_state.cookie_system_prompt = new_system_prompt # Guardar en cookie
 
86
  if "chat_session" in st.session_state:
87
  del st.session_state.chat_session
88
  st.success("System Prompt actualizado con 茅xito!")
89
-
90
  if st.session_state.system_prompt:
91
  st.markdown("### System Prompt Actual:")
92
  st.info(st.session_state.system_prompt)
93
 
 
94
  elif selected == "Chatbot":
95
  model = load_gemini_pro()
96
-
97
- # Inicializar o cargar sesi贸n de chat
98
  if "chat_session" not in st.session_state:
99
  loaded_chat = load_chat_history()
100
  if loaded_chat:
@@ -105,31 +139,26 @@ elif selected == "Chatbot":
105
  st.session_state.chat_session.send_message(st.session_state.system_prompt)
106
 
107
  st.title("Gnosticdev Chatbot")
108
-
109
  if st.session_state.system_prompt:
110
  with st.expander("Ver System Prompt actual"):
111
  st.info(st.session_state.system_prompt)
112
-
113
- # Mostrar historial
114
  for message in st.session_state.chat_session.history:
115
  with st.chat_message(translate_role_to_streamlit(message.role)):
116
  st.markdown(message.parts[0].text)
117
 
118
- # Campo de entrada
119
  user_prompt = st.chat_input("Preguntame algo...")
120
  if user_prompt:
121
- st.chat_message("user").markdown(user_prompt)
122
- gemini_response = st.session_state.chat_session.send_message(user_prompt)
 
123
  with st.chat_message("assistant"):
124
  st.markdown(gemini_response.text)
125
-
126
- # Guardar historial actualizado
127
  save_chat_history(st.session_state.chat_session.history)
128
 
 
129
  elif selected == "Image Captioning":
130
  st.title("Image Caption Generation馃摳")
131
  upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
132
-
133
  if upload_image and st.button("Generate"):
134
  image = Image.open(upload_image)
135
  col1, col2 = st.columns(2)
 
1
  import os
2
  import streamlit as st
3
  import json
4
+ import requests
5
+ import re
6
+ from bs4 import BeautifulSoup
7
  from streamlit_option_menu import option_menu
8
+ from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce) # Asegúrate de que este archivo existe y contiene las funciones
9
  from PIL import Image
10
 
11
  # Setting the page config
 
16
  initial_sidebar_state="expanded",
17
  )
18
 
19
+ # Función para guardar el historial en cookies (sin cambios)
20
def save_chat_history(history):
    """Persist the chat *history* into the session-state "cookie".

    Each message is flattened to a plain {"role", "text"} dict (only the
    first part's text is kept) so the whole conversation survives a
    round-trip through json.
    """
    snapshot = [
        {"role": entry.role, "text": entry.parts[0].text}
        for entry in history
    ]
    st.session_state.cookie_chat_history = json.dumps(snapshot)
28
 
29
+ # Función para cargar el historial desde cookies (sin cambios)
30
  def load_chat_history():
31
  if 'cookie_chat_history' in st.session_state:
32
  try:
33
  history = json.loads(st.session_state.cookie_chat_history)
34
  model = load_gemini_pro()
35
  chat = model.start_chat(history=[])
 
36
  if st.session_state.system_prompt:
37
  chat.send_message(st.session_state.system_prompt)
38
  for message in history:
 
43
  st.error(f"Error cargando el historial: {e}")
44
  return None
45
 
46
+ # Inicializar estados (sin cambios)
47
  if "system_prompt" not in st.session_state:
48
  st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
49
 
 
55
  icons=['gear', 'chat-dots-fill', 'image-fill'],
56
  default_index=0
57
  )
58
+
 
59
  if st.button("Borrar Historial"):
60
  if 'cookie_chat_history' in st.session_state:
61
  del st.session_state.cookie_chat_history
 
69
  else:
70
  return user_role
71
 
72
+
73
def extract_urls(text):
    """Return every http(s) URL found in *text*, in order of appearance.

    Fix over the naive pattern: ``\\S+`` greedily swallows sentence
    punctuation that follows a URL in running prose ("see https://a.com."
    would yield an unfetchable URL ending in "."), so trailing
    punctuation is stripped. Parentheses are kept, since they can be a
    legitimate part of a URL (e.g. Wikipedia article paths).
    """
    url_pattern = r"(https?://\S+)"
    urls = re.findall(url_pattern, text)
    return [url.rstrip(".,;:!?\"'") for url in urls]
77
+
78
def fetch_url_content(url):
    """Download the raw body of *url*.

    Returns the response text on success. On any request failure
    (connection error, timeout, non-2xx status) it returns an error
    message string prefixed with "Error" — callers rely on that prefix
    as the failure sentinel.
    """
    try:
        # A 10s timeout keeps a dead host from blocking the Streamlit UI.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Error al acceder a la URL '{url}': {e}"
    return response.text
85
+
86
def process_url_content(content):
    """Reduce an HTML document to a single whitespace-separated text string.

    Parses *content* with BeautifulSoup's built-in html.parser and joins
    the document's text nodes with spaces. Returns an "Error"-prefixed
    message string if parsing blows up.
    """
    try:
        parsed = BeautifulSoup(content, "html.parser")
        return parsed.get_text(" ", strip=True)
    except Exception as e:
        return f"Error al procesar el contenido HTML: {e}"
94
+
95
+
96
def process_urls_in_prompt(prompt):
    """Return *prompt* with every URL replaced by that page's text.

    Each URL found by extract_urls is fetched; successful fetches are
    reduced to plain text via process_url_content, while failures
    substitute the "Error"-prefixed message from fetch_url_content, so
    the model sees what went wrong instead of a dead link.
    """
    result = prompt
    for link in extract_urls(prompt):
        fetched = fetch_url_content(link)
        if not fetched.startswith("Error"):
            fetched = process_url_content(fetched)
        result = result.replace(link, fetched)
    return result
107
+
108
  if selected == "System Prompt":
109
  st.title("Configuraci贸n del System Prompt")
 
110
  new_system_prompt = st.text_area(
111
+ "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
112
  value=st.session_state.system_prompt,
113
  height=300,
114
+ help="Escribe aqu铆 las instrucciones que definir谩n el comportamiento del AI. Puedes incluir URLs."
115
  )
116
+
117
  if st.button("Guardar System Prompt"):
118
+ processed_prompt = process_urls_in_prompt(new_system_prompt)
119
+ st.session_state.system_prompt = processed_prompt
120
+ st.session_state.cookie_system_prompt = processed_prompt
121
  if "chat_session" in st.session_state:
122
  del st.session_state.chat_session
123
  st.success("System Prompt actualizado con 茅xito!")
124
+
125
  if st.session_state.system_prompt:
126
  st.markdown("### System Prompt Actual:")
127
  st.info(st.session_state.system_prompt)
128
 
129
+
130
  elif selected == "Chatbot":
131
  model = load_gemini_pro()
 
 
132
  if "chat_session" not in st.session_state:
133
  loaded_chat = load_chat_history()
134
  if loaded_chat:
 
139
  st.session_state.chat_session.send_message(st.session_state.system_prompt)
140
 
141
  st.title("Gnosticdev Chatbot")
 
142
  if st.session_state.system_prompt:
143
  with st.expander("Ver System Prompt actual"):
144
  st.info(st.session_state.system_prompt)
 
 
145
  for message in st.session_state.chat_session.history:
146
  with st.chat_message(translate_role_to_streamlit(message.role)):
147
  st.markdown(message.parts[0].text)
148
 
 
149
  user_prompt = st.chat_input("Preguntame algo...")
150
  if user_prompt:
151
+ processed_user_prompt = process_urls_in_prompt(user_prompt)
152
+ st.chat_message("user").markdown(processed_user_prompt)
153
+ gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
154
  with st.chat_message("assistant"):
155
  st.markdown(gemini_response.text)
 
 
156
  save_chat_history(st.session_state.chat_session.history)
157
 
158
+
159
  elif selected == "Image Captioning":
160
  st.title("Image Caption Generation馃摳")
161
  upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
 
162
  if upload_image and st.button("Generate"):
163
  image = Image.open(upload_image)
164
  col1, col2 = st.columns(2)