import base64
import os
import tarfile
import threading
import urllib.request
import warnings

import streamlit as st
import yaml
from PIL import Image
from yaml.loader import SafeLoader

import utilities.invoke_models as invoke_models

warnings.filterwarnings("ignore", category=DeprecationWarning)

# Page setup
st.set_page_config(
    layout="wide",
    page_icon="/home/ubuntu/images/opensearch_mark_default.png",
)

# def async_bedrock_warmup():
#     try:
#         _ = invoke_models.invoke_model_mm("hi", "none")
#         _ = invoke_models.invoke_model("hi")
#         _ = invoke_models.invoke_llm_model("hi", False)
#         st.session_state.bedrock_warmup_done = True
#         print("[Background Warmup] Done.")
#     except Exception as e:
#         print(f"[Background Warmup] Failed: {e}")

# if "bedrock_warmup_done" not in st.session_state:
#     async_bedrock_warmup()
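# Minimal sketch (left disabled, like the helper above) of how the warmup could
# run off the main thread using the threading import; the daemon thread is an
# assumption, and st.session_state writes from a worker thread may require
# Streamlit's script-run context to be attached to that thread.
# if "bedrock_warmup_done" not in st.session_state:
#     threading.Thread(target=async_bedrock_warmup, daemon=True).start()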
""", unsafe_allow_html=True) st.markdown(""" """, unsafe_allow_html=True) # Header with logo and title col_logo, col_title = st.columns([32, 68]) with col_logo: st.image("/home/user/app/images/OS_AI_1_cropped.png", use_column_width=True) spacer_col = st.columns(1)[0] with spacer_col: st.markdown("
", unsafe_allow_html=True) col1, col2, col3 = st.columns([1, 1, 1]) with col1: st.markdown("""
# Demo cards; the markup below is a simplified stand-in for the original card styling
with col1:
    st.markdown("""
        <div style="text-align: center;">
            <h1>🔍</h1>
            <h3>AI Search</h3>
            <p>Explore ML search types, Re-ranking and Query rewriting on retail data</p>
        </div>
    """, unsafe_allow_html=True)
with col2:
    st.markdown("""
        <div style="text-align: center;">
            <h1>💬</h1>
            <h3>Multimodal RAG</h3>
            <p>Explore Multimodal RAG over complex PDFs (with tables, graphs etc)</p>
        </div>
    """, unsafe_allow_html=True)

st.markdown("""
""", unsafe_allow_html=True)
with col3:
    st.markdown("""
        <div style="text-align: center;">
            <h1>🤖</h1>
            <h3>Agentic RAG</h3>
            <p>Explore how an AI agent in front of RAG enhances product search experience</p>
        </div>
    """, unsafe_allow_html=True)

st.markdown(""" """, unsafe_allow_html=True)
st.markdown(""" """, unsafe_allow_html=True)
st.markdown(""" """, unsafe_allow_html=True)