gnosticdev committed on
Commit
300e083
verified
1 Parent(s): 48344ea

Update app.py

Files changed (1)
  1. app.py +37 -72
app.py CHANGED
@@ -1,28 +1,10 @@
 import os
 import streamlit as st
 import json
-import requests
-import re
-from bs4 import BeautifulSoup
 from streamlit_option_menu import option_menu
 from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
 from PIL import Image
 
-# Path for saving the System Prompt
-SYSTEM_PROMPT_FILE = "system_prompt.json"
-
-# Function to load the System Prompt from the file
-def load_system_prompt():
-    if os.path.exists(SYSTEM_PROMPT_FILE):
-        with open(SYSTEM_PROMPT_FILE, 'r') as f:
-            return json.load(f).get("system_prompt", "")
-    return ""
-
-# Function to save the System Prompt to the file
-def save_system_prompt(system_prompt):
-    with open(SYSTEM_PROMPT_FILE, 'w') as f:
-        json.dump({"system_prompt": system_prompt}, f)
-
 # Setting the page config
 st.set_page_config(
     page_title="GnosticDev AI",
@@ -31,18 +13,16 @@ st.set_page_config(
     initial_sidebar_state="expanded",
 )
 
-# Load the saved System Prompt
-if "system_prompt" not in st.session_state:
-    st.session_state.system_prompt = load_system_prompt()
-
 # Function to save the chat history in cookies
 def save_chat_history(history):
+    # Convert the history to a serializable format
     serializable_history = []
     for message in history:
         serializable_history.append({
             "role": message.role,
             "text": message.parts[0].text
         })
+    # Save to the cookie
     st.session_state.cookie_chat_history = json.dumps(serializable_history)
 
 # Function to load the chat history from cookies
@@ -52,6 +32,7 @@ def load_chat_history():
         history = json.loads(st.session_state.cookie_chat_history)
         model = load_gemini_pro()
         chat = model.start_chat(history=[])
+        # Rebuild the history
         if st.session_state.system_prompt:
             chat.send_message(st.session_state.system_prompt)
         for message in history:
@@ -62,6 +43,10 @@ def load_chat_history():
         st.error(f"Error cargando el historial: {e}")
         return None
 
+# Initialize state
+if "system_prompt" not in st.session_state:
+    st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
+
 with st.sidebar:
     selected = option_menu(
         "GD AI",
@@ -70,7 +55,8 @@ with st.sidebar:
         icons=['gear', 'chat-dots-fill', 'image-fill'],
         default_index=0
     )
-
+
+    # Button to clear the history
     if st.button("Borrar Historial"):
         if 'cookie_chat_history' in st.session_state:
             del st.session_state.cookie_chat_history
@@ -84,84 +70,66 @@ def translate_role_to_streamlit(user_role):
     else:
         return user_role
 
-def extract_urls(text):
-    url_pattern = r"(https?://\S+)"
-    urls = re.findall(url_pattern, text)
-    return urls
-
-def fetch_url_content(url):
-    try:
-        response = requests.get(url, timeout=10)  # Add a timeout to avoid hanging
-        response.raise_for_status()
-        return response.text
-    except requests.exceptions.RequestException as e:
-        return f"Error al acceder a la URL '{url}': {e}"
-
-def process_url_content(content):
-    try:
-        soup = BeautifulSoup(content, "html.parser")
-        # Extract only the main body text, ignoring script and style tags
-        text = soup.get_text(" ", strip=True)
-        return text
-    except Exception as e:
-        return f"Error al procesar el contenido HTML: {e}"
-
-def process_urls_in_prompt(prompt):
-    urls = extract_urls(prompt)
-    new_prompt = prompt
-    for url in urls:
-        content = fetch_url_content(url)
-        if content.startswith("Error"):  # Handle errors when fetching the URL content
-            new_prompt = new_prompt.replace(url, content)
-        else:
-            processed_content = process_url_content(content)
-            new_prompt = new_prompt.replace(url, processed_content)
-    return new_prompt
-
 if selected == "System Prompt":
     st.title("Configuración del System Prompt")
+
     new_system_prompt = st.text_area(
-        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
+        "Ingresa las instrucciones para el AI (System Prompt)",
         value=st.session_state.system_prompt,
        height=300,
-        help="Escribe aquí las instrucciones que definirán el comportamiento del AI. Puedes incluir URLs."
+        help="Escribe aquí las instrucciones que definirán el comportamiento del AI"
     )
-
+
     if st.button("Guardar System Prompt"):
-        processed_prompt = process_urls_in_prompt(new_system_prompt)
-        st.session_state.system_prompt = processed_prompt
-        save_system_prompt(processed_prompt)  # Save to the JSON file
+        st.session_state.system_prompt = new_system_prompt
+        st.session_state.cookie_system_prompt = new_system_prompt  # Save to the cookie
         if "chat_session" in st.session_state:
            del st.session_state.chat_session
        st.success("System Prompt actualizado con éxito!")
-
+
    if st.session_state.system_prompt:
        st.markdown("### System Prompt Actual:")
        st.info(st.session_state.system_prompt)
 
 elif selected == "Chatbot":
     model = load_gemini_pro()
+
+    # Initialize or load the chat session
     if "chat_session" not in st.session_state:
         loaded_chat = load_chat_history()
         if loaded_chat:
             st.session_state.chat_session = loaded_chat
         else:
             st.session_state.chat_session = model.start_chat(history=[])
-            # Don't automatically send the system_prompt here
+            if st.session_state.system_prompt:
+                st.session_state.chat_session.send_message(st.session_state.system_prompt)
 
     st.title("Gnosticdev Chatbot")
+
+    if st.session_state.system_prompt:
+        with st.expander("Ver System Prompt actual"):
+            st.info(st.session_state.system_prompt)
+
+    # Show the chat history
+    for message in st.session_state.chat_session.history:
+        with st.chat_message(translate_role_to_streamlit(message.role)):
+            st.markdown(message.parts[0].text)
+
+    # Input field
     user_prompt = st.chat_input("Preguntame algo...")
     if user_prompt:
-        processed_user_prompt = process_urls_in_prompt(user_prompt)
-        st.chat_message("user").markdown(processed_user_prompt)
-        gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
+        st.chat_message("user").markdown(user_prompt)
+        gemini_response = st.session_state.chat_session.send_message(user_prompt)
         with st.chat_message("assistant"):
             st.markdown(gemini_response.text)
+
+        # Save the updated history
         save_chat_history(st.session_state.chat_session.history)
 
 elif selected == "Image Captioning":
     st.title("Image Caption Generation📷")
     upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
+
     if upload_image and st.button("Generate"):
         image = Image.open(upload_image)
         col1, col2 = st.columns(2)
@@ -170,7 +138,4 @@ elif selected == "Image Captioning":
         default_prompt = "Write a caption for this image"
         caption = gemini_pro_vision_responce(default_prompt, image)
         with col2:
-            st.info(caption)
-
-
-# End of file
+            st.info(caption)
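
app.py imports load_gemini_pro and gemini_pro_vision_responce from gemini_utility, a module this commit does not touch. A minimal sketch of what that helper module could look like, assuming the google-generativeai SDK and standard Gemini model names; the repo's actual gemini_utility.py may differ:

# gemini_utility.py -- hypothetical sketch, not the module shipped in this repo
import os
import google.generativeai as genai

# Assumes the API key is supplied through an environment variable
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

def load_gemini_pro():
    # Text model; app.py calls .start_chat() / .send_message() on the returned object
    return genai.GenerativeModel("gemini-pro")

def gemini_pro_vision_responce(prompt, image):
    # Multimodal call: caption a PIL image with the given prompt
    model = genai.GenerativeModel("gemini-pro-vision")
    response = model.generate_content([prompt, image])
    return response.text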