gnosticdev committed
Commit b6dc07e (verified)
1 Parent(s): 4d3eb50

Update app.py

Files changed (1)
  1. app.py +41 -66
app.py CHANGED
@@ -8,22 +8,7 @@ from streamlit_option_menu import option_menu
 from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
 from PIL import Image
 
-# Path to save the System Prompt
-SYSTEM_PROMPT_FILE = "system_prompt.json"
-
-# Function to load the System Prompt from the file
-def load_system_prompt():
-    if os.path.exists(SYSTEM_PROMPT_FILE):
-        with open(SYSTEM_PROMPT_FILE, 'r') as f:
-            return json.load(f).get("system_prompt", "")
-    return ""
-
-# Function to save the System Prompt to the file
-def save_system_prompt(system_prompt):
-    with open(SYSTEM_PROMPT_FILE, 'w') as f:
-        json.dump({"system_prompt": system_prompt}, f)
-
-# Setting the page config
+# Page configuration
 st.set_page_config(
     page_title="GnosticDev AI",
     page_icon="馃",
@@ -31,10 +16,6 @@ st.set_page_config(
     initial_sidebar_state="expanded",
 )
 
-# Load the saved System Prompt
-if "system_prompt" not in st.session_state:
-    st.session_state.system_prompt = load_system_prompt()
-
 # Function to save the chat history in cookies
 def save_chat_history(history):
     serializable_history = []
@@ -62,6 +43,10 @@ def load_chat_history():
         st.error(f"Error cargando el historial: {e}")
     return None
 
+# Initialize state
+if "system_prompt" not in st.session_state:
+    st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
+
 with st.sidebar:
     selected = option_menu(
         "GD AI",
@@ -79,10 +64,7 @@ with st.sidebar:
         st.success("Historial borrado!")
 
 def translate_role_to_streamlit(user_role):
-    if user_role == "model":
-        return "assistant"
-    else:
-        return user_role
+    return "assistant" if user_role == "model" else user_role
 
 def extract_urls(text):
     url_pattern = r"(https?://\S+)"
@@ -91,7 +73,7 @@ def extract_urls(text):
 
 def fetch_url_content(url):
     try:
-        response = requests.get(url, timeout=10)  # Add a timeout to avoid hanging
+        response = requests.get(url, timeout=10)
         response.raise_for_status()
         return response.text
     except requests.exceptions.RequestException as e:
@@ -100,7 +82,6 @@ def fetch_url_content(url):
 def process_url_content(content):
     try:
         soup = BeautifulSoup(content, "html.parser")
-        # Extract only the main body text, ignoring script and style tags
         text = soup.get_text(" ", strip=True)
         return text
     except Exception as e:
@@ -111,26 +92,17 @@ def process_urls_in_prompt(prompt):
     new_prompt = prompt
     for url in urls:
         content = fetch_url_content(url)
-        if content.startswith("Error"):  # Error handling when fetching the URL content
+        if content.startswith("Error"):
             new_prompt = new_prompt.replace(url, content)
         else:
             processed_content = process_url_content(content)
             new_prompt = new_prompt.replace(url, processed_content)
     return new_prompt
 
-if selected == "System Prompt":
-    st.title("Configuración del System Prompt")
-    new_system_prompt = st.text_area(
-        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
-        value=st.session_state.system_prompt,
-        height=300,
-        help="Escribe aquí las instrucciones que definirán el comportamiento del AI. Puedes incluir URLs."
-    )
-
     if st.button("Guardar System Prompt"):
         processed_prompt = process_urls_in_prompt(new_system_prompt)
         st.session_state.system_prompt = processed_prompt
-        save_system_prompt(processed_prompt)  # Save to JSON file
+        st.session_state.cookie_system_prompt = processed_prompt
         if "chat_session" in st.session_state:
             del st.session_state.chat_session
         st.success("System Prompt actualizado con éxito!")
@@ -147,43 +119,46 @@ elif selected == "Chatbot":
         st.session_state.chat_session = loaded_chat
     else:
         st.session_state.chat_session = model.start_chat(history=[])
-        # Do not send the system_prompt automatically here
+        if st.session_state.system_prompt:
+            st.session_state.chat_session.send_message(st.session_state.system_prompt)
 
     st.title("Gnosticdev Chatbot")
-    user_prompt = st.chat_input("Preguntame algo...")
+    if st.session_state.system_prompt:
+        with st.expander("Ver System Prompt actual"):
+            st.info(st.session_state.system_prompt)
+
+    for message in st.session_state.chat_session.history:
+        with st.chat_message(translate_role_to_streamlit(message.role)):
+            st.markdown(message.parts[0].text)
+
+    user_prompt = st.chat_input("Pregúntame algo...")
     if user_prompt:
         processed_user_prompt = process_urls_in_prompt(user_prompt)
         st.chat_message("user").markdown(processed_user_prompt)
-        gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
+
+        # Combine the System Prompt with the user input
+        full_prompt = f"{st.session_state.system_prompt}\nUser: {processed_user_prompt}\nAI:"
+        gemini_response = st.session_state.chat_session.send_message(full_prompt)
+
         with st.chat_message("assistant"):
             st.markdown(gemini_response.text)
+
        save_chat_history(st.session_state.chat_session.history)
 
-
 elif selected == "Image Captioning":
     st.title("Image Caption Generation📷")
-    upload_image = st.file_uploader("Sube una imagen...", type=["jpg", "jpeg", "png"])
-
-    if upload_image and st.button("Generar"):
-        try:
-            # Load and display the image
-            image = Image.open(upload_image)
-            col1, col2 = st.columns(2)
-            with col1:
-                st.image(image, caption="Imagen subida", use_column_width=True)
-
-            # Generate a caption using an AI vision function
-            default_prompt = "Escribe un subtítulo para esta imagen"
-            caption = gemini_pro_vision_responce(default_prompt, image)
-
-            # Show the result
-            with col2:
-                st.info(caption)
-        except Exception as e:
-            st.error(f"Error procesando la imagen: {e}")
-
-    # Default message if no section is selected
-    else:
-        st.write("Selecciona una opción en el menú para comenzar.")
-
-
+    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
+    if upload_image and st.button("Generate"):
+        image = Image.open(upload_image)
+        col1, col2 = st.columns(2)
+        with col1:
+            st.image(image, caption="Uploaded Image", use_column_width=True)
+
+        # Generate a caption for the image
+        default_prompt = "Write a caption for this image"
+        caption = gemini_pro_vision_responce(default_prompt, image)
+
+        with col2:
+            st.info(caption)
+
+    # End of file
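Note: app.py imports load_gemini_pro and gemini_pro_vision_responce from gemini_utility, which is not touched by this commit. Below is a minimal sketch of what that module is assumed to provide, based only on how app.py calls it; the google.generativeai usage, model names, and the GEMINI_API_KEY environment variable are assumptions, not part of this repository.

# gemini_utility.py -- hypothetical sketch, not part of this commit
import os

from PIL import Image
import google.generativeai as genai

# Assumed configuration: the API key is read from an environment variable.
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

def load_gemini_pro():
    # app.py calls model.start_chat(history=[]), so a chat-capable model is returned.
    return genai.GenerativeModel("gemini-pro")

def gemini_pro_vision_responce(prompt: str, image: Image.Image) -> str:
    # app.py passes a text prompt plus a PIL image and displays the returned text.
    model = genai.GenerativeModel("gemini-pro-vision")
    response = model.generate_content([prompt, image])
    return response.text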