# GnosticDev AI — Streamlit chat + image-captioning app backed by Gemini.
import os
import streamlit as st
import json
from streamlit_option_menu import option_menu
from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
from PIL import Image
# Page configuration — must run before any other Streamlit call.
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="馃",
    layout="centered",
    initial_sidebar_state="expanded",
)
def save_chat_history(history):
    """Store a serializable copy of the chat transcript in session state.

    Each Gemini message object is reduced to a plain dict holding its role
    and the text of its first part — exactly what load_chat_history() needs
    to rebuild the conversation later.
    """
    st.session_state.chat_history = [
        {"role": msg.role, "text": msg.parts[0].text} for msg in history
    ]
def load_chat_history():
    """Rebuild a Gemini chat session from the serialized session-state history.

    Returns:
        A chat session pre-loaded with the stored transcript, or None when
        there is no saved history or reconstruction fails.
    """
    if 'chat_history' not in st.session_state:
        return None
    try:
        model = load_gemini_pro()
        # Attach the transcript via start_chat(history=...) instead of
        # replaying it with send_message(): replaying issues one live model
        # call per stored message (slow and costly) and appends brand-new
        # model replies, corrupting the restored conversation.
        history = []
        if st.session_state.system_prompt:
            # Gemini has no dedicated system role here; the prompt was
            # originally sent as the first user message, so restore it as one.
            history.append(
                {"role": "user", "parts": [st.session_state.system_prompt]}
            )
        history.extend(
            {"role": msg["role"], "parts": [msg["text"]]}
            for msg in st.session_state.chat_history
        )
        return model.start_chat(history=history)
    except Exception as e:
        st.error(f"Error cargando el historial: {e}")
        return None
# Seed session-state keys on first run so later code can read them safely.
for _key, _default in (("system_prompt", ""), ("chat_history", [])):
    if _key not in st.session_state:
        st.session_state[_key] = _default
with st.sidebar:
    # Main navigation menu (the old "System Prompt" entry was removed).
    selected = option_menu(
        "GD AI",
        ["Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['chat-dots-fill', 'image-fill'],
        default_index=0,
    )

    # Wipe both the serialized transcript and the live chat session.
    if st.button("Borrar Historial"):
        st.session_state.chat_history = []
        st.session_state.chat_session = None
        st.success("Historial borrado!")
def translate_role_to_streamlit(user_role):
    """Map a Gemini role name to the one st.chat_message() understands."""
    return "assistant" if user_role == "model" else user_role
if selected == "Chatbot":
    model = load_gemini_pro()

    # (Re)create the chat session. Testing for None as well as absence is
    # essential: "Borrar Historial" stores chat_session = None, so a bare
    # membership check would skip re-initialization and the history loop
    # below would crash with AttributeError on None.history.
    if st.session_state.get("chat_session") is None:
        loaded_chat = load_chat_history()
        if loaded_chat:
            st.session_state.chat_session = loaded_chat
        else:
            st.session_state.chat_session = model.start_chat(history=[])
            if st.session_state.system_prompt:
                st.session_state.chat_session.send_message(
                    st.session_state.system_prompt
                )

    st.title("Gnosticdev Chatbot")

    if st.session_state.system_prompt:
        with st.expander("Ver System Prompt actual"):
            st.info(st.session_state.system_prompt)

    # Replay the transcript so the page shows the whole conversation.
    for message in st.session_state.chat_session.history:
        with st.chat_message(translate_role_to_streamlit(message.role)):
            st.markdown(message.parts[0].text)

    # Input field: send the prompt, render the reply, persist the transcript.
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
        st.chat_message("user").markdown(user_prompt)
        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
        save_chat_history(st.session_state.chat_session.history)

    # Offer the transcript as a plain-text download. Rendered directly:
    # the old pattern of nesting st.download_button inside `if st.button(...)`
    # never delivers the file, because clicking the download reruns the
    # script and the outer button's state resets to False.
    if st.session_state.chat_history:
        transcript = "\n".join(
            f"{msg['role']}: {msg['text']}"
            for msg in st.session_state.chat_history
        )
        st.download_button(
            label="Descargar Historial del Chat",
            data=transcript,
            file_name="historial_chat.txt",
            mime="text/plain",
        )
elif selected == "Image Captioning":
st.title("Image Caption Generation馃摳")
upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
if upload_image and st.button("Generate"):
image = Image.open(upload_image)
col1, col2 = st.columns(2)
with col1:
st.image(image, caption="Uploaded Image", use_column_width=True)
default_prompt = "Write a caption for this image"
caption = gemini_pro_vision_responce(default_prompt, image)
with col2:
st.info(caption)