gnosticdev committed
Commit 9dd3d1d (verified)
1 Parent(s): 300e083

Update app.py

Files changed (1)
  1. app.py +85 -80
app.py CHANGED
@@ -4,8 +4,9 @@ import json
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image
+ import time

- # Setting the page config
+ # Configuración de la página
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
@@ -13,123 +14,127 @@ st.set_page_config(
    initial_sidebar_state="expanded",
)

- # Función para guardar el historial en cookies
- def save_chat_history(history):
-     # Convertir el historial a un formato serializable
-     serializable_history = []
-     for message in history:
-         serializable_history.append({
-             "role": message.role,
-             "text": message.parts[0].text
-         })
-     # Guardar en cookie
-     st.session_state.cookie_chat_history = json.dumps(serializable_history)
+ # Archivo para almacenamiento permanente
+ PROMPTS_FILE = "training_prompts.json"

- # Función para cargar el historial desde cookies
- def load_chat_history():
-     if 'cookie_chat_history' in st.session_state:
-         try:
-             history = json.loads(st.session_state.cookie_chat_history)
-             model = load_gemini_pro()
-             chat = model.start_chat(history=[])
-             # Reconstruir el historial
-             if st.session_state.system_prompt:
-                 chat.send_message(st.session_state.system_prompt)
-             for message in history:
-                 if message["role"] != "model" or not message["text"].startswith(st.session_state.system_prompt):
-                     chat.send_message(message["text"])
-             return chat
-         except Exception as e:
-             st.error(f"Error cargando el historial: {e}")
-     return None
+ # Funciones para manejar el almacenamiento permanente
+ def load_prompts():
+     try:
+         if os.path.exists(PROMPTS_FILE):
+             with open(PROMPTS_FILE, 'r', encoding='utf-8') as file:
+                 return json.load(file)
+     except Exception as e:
+         st.error(f"Error cargando prompts: {e}")
+     return {"prompts": [], "current_prompt": ""}

- # Inicializar estados
- if "system_prompt" not in st.session_state:
-     st.session_state.system_prompt = st.session_state.get('cookie_system_prompt', "")
+ def save_prompts(prompts_data):
+     try:
+         with open(PROMPTS_FILE, 'w', encoding='utf-8') as file:
+             json.dump(prompts_data, file, ensure_ascii=False, indent=2)
+     except Exception as e:
+         st.error(f"Error guardando prompts: {e}")
+
+ # Inicialización de datos
+ if 'prompts_data' not in st.session_state:
+     st.session_state.prompts_data = load_prompts()

with st.sidebar:
    selected = option_menu(
        "GD AI",
-         ["System Prompt", "Chatbot", "Image Captioning"],
+         ["System Prompts", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )
-
-     # Botón para borrar historial
-     if st.button("Borrar Historial"):
-         if 'cookie_chat_history' in st.session_state:
-             del st.session_state.cookie_chat_history
-         if 'chat_session' in st.session_state:
-             del st.session_state.chat_session
-         st.success("Historial borrado!")
-
- def translate_role_to_streamlit(user_role):
-     if user_role == "model":
-         return "assistant"
-     else:
-         return user_role

- if selected == "System Prompt":
-     st.title("Configuración del System Prompt")
+ if selected == "System Prompts":
+     st.title("Gestión de System Prompts")

-     new_system_prompt = st.text_area(
-         "Ingresa las instrucciones para el AI (System Prompt)",
-         value=st.session_state.system_prompt,
-         height=300,
-         help="Escribe aquí las instrucciones que definirán el comportamiento del AI"
+     # Área para nuevo prompt
+     new_prompt = st.text_area(
+         "Nuevo System Prompt",
+         value="",
+         height=200,
+         help="Escribe aquí las nuevas instrucciones para el AI"
    )

-     if st.button("Guardar System Prompt"):
-         st.session_state.system_prompt = new_system_prompt
-         st.session_state.cookie_system_prompt = new_system_prompt  # Guardar en cookie
-         if "chat_session" in st.session_state:
-             del st.session_state.chat_session
-         st.success("System Prompt actualizado con éxito!")
-
-     if st.session_state.system_prompt:
-         st.markdown("### System Prompt Actual:")
-         st.info(st.session_state.system_prompt)
+     col1, col2 = st.columns([1, 2])
+     with col1:
+         if st.button("Añadir Nuevo Prompt"):
+             if new_prompt and new_prompt not in st.session_state.prompts_data["prompts"]:
+                 st.session_state.prompts_data["prompts"].append(new_prompt)
+                 save_prompts(st.session_state.prompts_data)
+                 st.success("Nuevo prompt añadido!")
+                 time.sleep(1)
+                 st.rerun()
+
+     # Lista de prompts guardados
+     st.markdown("### Prompts Guardados")
+     for i, prompt in enumerate(st.session_state.prompts_data["prompts"]):
+         with st.expander(f"Prompt {i+1}"):
+             st.text_area("", prompt, height=100, key=f"prompt_{i}", disabled=True)
+             col1, col2, col3 = st.columns([1, 1, 1])
+             with col1:
+                 if st.button("Usar este prompt", key=f"use_{i}"):
+                     st.session_state.prompts_data["current_prompt"] = prompt
+                     save_prompts(st.session_state.prompts_data)
+                     if "chat_session" in st.session_state:
+                         del st.session_state.chat_session
+                     st.success("Prompt activado!")
+             with col2:
+                 if st.button("Editar", key=f"edit_{i}"):
+                     st.session_state.editing_prompt = i
+                     st.session_state.editing_text = prompt
+             with col3:
+                 if st.button("Eliminar", key=f"delete_{i}"):
+                     st.session_state.prompts_data["prompts"].pop(i)
+                     save_prompts(st.session_state.prompts_data)
+                     st.success("Prompt eliminado!")
+                     time.sleep(1)
+                     st.rerun()
+
+     # Mostrar prompt actual
+     st.markdown("### Prompt Actual")
+     current_prompt = st.session_state.prompts_data.get("current_prompt", "")
+     if current_prompt:
+         st.info(current_prompt)
+     else:
+         st.warning("No hay prompt activo")

elif selected == "Chatbot":
    model = load_gemini_pro()

-     # Inicializar o cargar sesión de chat
+     # Initialize chat session with current prompt
    if "chat_session" not in st.session_state:
-         loaded_chat = load_chat_history()
-         if loaded_chat:
-             st.session_state.chat_session = loaded_chat
-         else:
-             st.session_state.chat_session = model.start_chat(history=[])
-             if st.session_state.system_prompt:
-                 st.session_state.chat_session.send_message(st.session_state.system_prompt)
+         st.session_state.chat_session = model.start_chat(history=[])
+         current_prompt = st.session_state.prompts_data.get("current_prompt")
+         if current_prompt:
+             st.session_state.chat_session.send_message(current_prompt)

    st.title("Gnosticdev Chatbot")

-     if st.session_state.system_prompt:
+     # Mostrar prompt actual
+     current_prompt = st.session_state.prompts_data.get("current_prompt")
+     if current_prompt:
        with st.expander("Ver System Prompt actual"):
-             st.info(st.session_state.system_prompt)
+             st.info(current_prompt)

-     # Mostrar historial
+     # Display chat history
    for message in st.session_state.chat_session.history:
        with st.chat_message(translate_role_to_streamlit(message.role)):
            st.markdown(message.parts[0].text)

-     # Campo de entrada
+     # Chat input
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
        st.chat_message("user").markdown(user_prompt)
        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
-
-         # Guardar historial actualizado
-         save_chat_history(st.session_state.chat_session.history)

elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
-
    if upload_image and st.button("Generate"):
        image = Image.open(upload_image)
        col1, col2 = st.columns(2)
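
Note on the new version: the Chatbot branch still calls translate_role_to_streamlit, whose only definition shown in this diff sits in the removed hunk, and load_prompts() relies on os.path.exists while the visible imports only add time. A minimal sketch of what app.py presumably keeps (or needs to re-add) outside the changed hunks; the helper below mirrors the removed definition, and the import os line is an assumption, not visible in the diff:

import os  # assumed near the top of app.py; not shown in the changed hunks

def translate_role_to_streamlit(user_role):
    # Map Gemini's "model" role onto Streamlit's "assistant" chat role;
    # pass other roles (e.g. "user") through unchanged.
    if user_role == "model":
        return "assistant"
    return user_role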