Update app.py
app.py
CHANGED
@@ -1,52 +1,52 @@
-import os
-import streamlit as st
-from PIL import Image
-
-from huggingface_hub import Repository
-from huggingface_hub import login
-
-login(token = os.environ['HF_TOKEN'])
-
-repo = Repository(
-    local_dir="agent_function",
-    repo_type="dataset",
-    clone_from=os.environ['DATASET'],
-    token=True
-)
-repo.git_pull()
-
-from agent_function.function import generate_answer
-
-Image.MAX_IMAGE_PIXELS = None
-
-st.set_page_config(layout="wide")
-
-st.title("
-col1, col2 = st.columns([1 ,2])
-
-if "messages" not in st.session_state:
-    st.session_state.messages = []
-
-if "overlay" not in st.session_state:
-    st.session_state.overlay = Image.open(f'agent_function/base.png')
-
-with col1:
-    messages_box = st.container(height=500)
-    # Display chat messages from history on app rerun
-    for message in st.session_state.messages:
-        with messages_box.chat_message(message["role"]):
-            st.markdown(message["content"])
-
-    # React to user input
-    if prompt := st.chat_input("What is up?"):
-        messages_box.chat_message("user").markdown(prompt)
-        st.session_state.messages.append({"role": "user", "content": prompt})
-        response = generate_answer(prompt)
-        with messages_box.chat_message("assistant"):
-            st.markdown(response)
-        # Add assistant response to chat history
-        st.session_state.messages.append({"role": "assistant", "content": response})
-
-with col2:
-    with st.spinner():
+import os
+import streamlit as st
+from PIL import Image
+
+from huggingface_hub import Repository
+from huggingface_hub import login
+
+login(token = os.environ['HF_TOKEN'])
+
+repo = Repository(
+    local_dir="agent_function",
+    repo_type="dataset",
+    clone_from=os.environ['DATASET'],
+    token=True
+)
+repo.git_pull()
+
+from agent_function.function import generate_answer
+
+Image.MAX_IMAGE_PIXELS = None
+
+st.set_page_config(layout="wide")
+
+st.title("Mapbot")
+col1, col2 = st.columns([1 ,2])
+
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+if "overlay" not in st.session_state:
+    st.session_state.overlay = Image.open(f'agent_function/base.png')
+
+with col1:
+    messages_box = st.container(height=500)
+    # Display chat messages from history on app rerun
+    for message in st.session_state.messages:
+        with messages_box.chat_message(message["role"]):
+            st.markdown(message["content"])
+
+    # React to user input
+    if prompt := st.chat_input("What is up?"):
+        messages_box.chat_message("user").markdown(prompt)
+        st.session_state.messages.append({"role": "user", "content": prompt})
+        response = generate_answer(prompt)
+        with messages_box.chat_message("assistant"):
+            st.markdown(response)
+        # Add assistant response to chat history
+        st.session_state.messages.append({"role": "assistant", "content": response})
+
+with col2:
+    with st.spinner():
         st.image(st.session_state.overlay)