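"""Streamlit chat view for the agent app.

Renders the running conversation and displays whatever the agent returns
(text, images, audio, tables, charts, ...) with the appropriate widget.
"""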
import streamlit as st
from controller import handle_submission, handle_submission_chat

import altair as alt                # Altair charts
import graphviz                     # Graphviz graphs
import matplotlib.figure            # Matplotlib figures
import numpy as np
import pandas as pd                 # DataFrames / Series
import plotly.graph_objects as go   # Plotly figures
import pydeck as pdk                # Pydeck charts
from bokeh.models import Plot       # Bokeh charts
from PIL import Image               # Image responses
from pydub import AudioSegment      # Audio responses

from app_agent_config import AgentConfig
from tool_loader import ToolLoader


def app_chat(agent_config: AgentConfig):
    """Render the chat UI and route agent responses to the matching Streamlit widget."""
    # Chat history lives in session state so it survives Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state.messages = []
        st.markdown("Hello there! How can I assist you today?")

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if user_message := st.chat_input("Enter message"):
        st.chat_message("user").markdown(user_message)
        st.session_state.messages.append({"role": "user", "content": user_message})

        # Only pass the tools whose checkboxes are selected.
        selected_tools = [
            agent_config.tool_loader.tools[idx]
            for idx, checkbox in enumerate(agent_config.tool_checkboxes)
            if checkbox
        ]

        # Run the agent against the selected inference endpoint.
        response = handle_submission(
            user_message,
            selected_tools,
            agent_config.url_endpoint,
            agent_config.document,
            agent_config.image,
            agent_config.context,
        )
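        # The agent can return many payload types (plain text, PIL images,
        # pydub audio, DataFrames, chart objects, ...); dispatch on the type
        # below and render each one with the matching Streamlit element.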
with st.chat_message("assistant"):
if response is None:
chat_respone = handle_submission_chat(user_message, response)
st.write(chat_respone)
# st.warning("The agent's response is None. Please try again. Generate an image of a flying uncormn.")
elif isinstance(response, Image.Image):
agent_config.image = response
chat_respone = handle_submission_chat(user_message, "No context . Created an image.")
st.write(chat_respone)
st.image(response)
elif isinstance(response, AudioSegment):
agent_config.audio = response
chat_respone = handle_submission_chat(user_message, "No context . Created audio file.")
st.write(chat_respone)
st.audio(response)
elif isinstance(response, int):
chat_respone = handle_submission_chat(user_message, response)
st.write(chat_respone)
st.markdown(response)
elif isinstance(response, str):
if "emojified_text" in response:
chat_respone = handle_submission_chat(user_message, "No context . Created the text with emojies.")
st.write(chat_respone)
st.markdown(f"{response['emojified_text']}")
else:
chat_respone = handle_submission_chat(user_message, response)
st.write(chat_respone)
st.markdown(response)
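            # Structured and chart payloads: lists, tabular data, dicts, and
            # figure objects from the plotting libraries imported above.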
            elif isinstance(response, list):
                # Assume the list contains markdown-renderable items.
                for item in response:
                    st.markdown(item)
            elif isinstance(response, pd.DataFrame):
                st.dataframe(response)
            elif isinstance(response, pd.Series):
                st.table(response.iloc[0:10])
            elif isinstance(response, dict):
                # A dict may be a Vega-Lite spec; otherwise show it as JSON.
                if "vega-lite" in str(response.get("$schema", "")):
                    st.vega_lite_chart(response)
                else:
                    st.json(response)
            elif isinstance(response, alt.Chart):
                st.altair_chart(response)
            elif isinstance(response, Plot):
                st.bokeh_chart(response)
            elif isinstance(response, (graphviz.Graph, graphviz.Digraph)):
                st.graphviz_chart(response)
            elif isinstance(response, go.Figure):
                st.plotly_chart(response)
            elif isinstance(response, pdk.Deck):
                st.pydeck_chart(response)
            elif isinstance(response, matplotlib.figure.Figure):
                st.pyplot(response)
            else:
                st.warning("Unrecognized response type. Please try again, e.g. 'Generate an image of a flying horse.'")

        # Store a text form of the reply so the history replay above can re-render it with st.markdown.
        st.session_state.messages.append(
            {"role": "assistant", "content": response if isinstance(response, str) else str(response)}
        )
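

# Minimal standalone usage sketch (illustrative only): in the full app this
# function is driven by the main entry point, which builds the AgentConfig
# (tool checkboxes, endpoint URL, context, ...). The call below assumes
# AgentConfig() can be constructed without arguments, which may not hold.
if __name__ == "__main__":
    st.title("Agent chat")
    _agent_config = AgentConfig()  # assumption: no required constructor arguments
    app_chat(_agent_config)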