import os
import json

import streamlit as st
from PIL import Image
from streamlit_option_menu import option_menu

from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
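

# Basic page configuration for the app (title, icon, layout, sidebar state).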
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="🤖",
    layout="centered",
    initial_sidebar_state="expanded",
)
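

# Store the current Gemini chat history in session state as plain dicts,
# so it survives Streamlit reruns and can be exported later.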
def save_chat_history(history):
    serializable_history = []
    for message in history:
        serializable_history.append({
            "role": message.role,
            "text": message.parts[0].text
        })
    st.session_state.chat_history = serializable_history
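

# Recreate a Gemini chat session from the serialized history, if any was saved.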
def load_chat_history():
    if st.session_state.get("chat_history"):
        try:
            model = load_gemini_pro()
            # Rebuild the session from the stored messages directly, so nothing
            # has to be re-sent to the model just to restore the conversation.
            history = [
                {"role": message["role"], "parts": [message["text"]]}
                for message in st.session_state.chat_history
            ]
            return model.start_chat(history=history)
        except Exception as e:
            st.error(f"Error cargando el historial: {e}")
    return None
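

# Session-state defaults for the first run.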
if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = ""

if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
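

# Sidebar: page selector plus a button to clear the stored conversation.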
with st.sidebar:
    selected = option_menu(
        "GD AI",
        ["Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['chat-dots-fill', 'image-fill'],
        default_index=0
    )

    if st.button("Borrar Historial"):
        st.session_state.chat_history = []
        # Drop the active chat session so a fresh one is created on the next rerun.
        st.session_state.pop("chat_session", None)
        st.success("Historial borrado!")
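

# Gemini uses the role "model"; Streamlit's chat UI expects "assistant".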
def translate_role_to_streamlit(user_role):
    if user_role == "model":
        return "assistant"
    else:
        return user_role
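

# Chatbot page: restore or start a chat session, render the history, and handle new prompts.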
if selected == "Chatbot":
    model = load_gemini_pro()

    # Reuse a restored session when one exists; otherwise start a fresh chat
    # and seed it with the system prompt, if one has been configured.
    if "chat_session" not in st.session_state:
        loaded_chat = load_chat_history()
        if loaded_chat:
            st.session_state.chat_session = loaded_chat
        else:
            st.session_state.chat_session = model.start_chat(history=[])
            if st.session_state.system_prompt:
                st.session_state.chat_session.send_message(st.session_state.system_prompt)

    st.title("Gnosticdev Chatbot")

    if st.session_state.system_prompt:
        with st.expander("Ver System Prompt actual"):
            st.info(st.session_state.system_prompt)

    for message in st.session_state.chat_session.history:
        with st.chat_message(translate_role_to_streamlit(message.role)):
            st.markdown(message.parts[0].text)

    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
        st.chat_message("user").markdown(user_prompt)
        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)

    # Keep the serialized copy of the conversation in session state up to date.
    save_chat_history(st.session_state.chat_session.history)

    if st.button("Descargar Historial del Chat"):
        chat_history = "\n".join([f"{msg['role']}: {msg['text']}" for msg in st.session_state.chat_history])
        st.download_button(
            label="Descargar",
            data=chat_history,
            file_name="historial_chat.txt",
            mime="text/plain"
        )
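
# Image Captioning page: upload an image and ask Gemini Pro Vision for a caption.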
elif selected == "Image Captioning":
    st.title("Image Caption Generation📷")
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])

    if upload_image and st.button("Generate"):
        image = Image.open(upload_image)
        col1, col2 = st.columns(2)
        with col1:
            st.image(image, caption="Uploaded Image", use_column_width=True)
        default_prompt = "Write a caption for this image"
        caption = gemini_pro_vision_responce(default_prompt, image)
        with col2:
            st.info(caption)