tweaks
- app.py +3 -2
- chatmap.py +59 -5
- requirements.txt +1 -0
app.py
CHANGED
@@ -188,7 +188,7 @@ with st.sidebar:
         "## Protected Areas"
 
     if st.toggle("PAD US-3", True):
-        alpha = st.slider("transparency", 0.0, 1.
+        alpha = st.slider("transparency", 0.0, 1.0, 0.5)
 
     with st.expander("custom style"):
         custom = st.text_area(
@@ -328,6 +328,7 @@ def summary_table(column = column, colors = colors):
     return df.to_pandas()
 
 df = summary_table(column, colors)
+total_percent = df.percent_protected.sum()
 
 
 base = alt.Chart(df).encode(
@@ -368,7 +369,7 @@ rsr_chart = alt.Chart(df).mark_bar().encode(
 col1, col2, col3 = st.columns(3)
 
 with col1:
-    f"#### Percent of Continental US Area"
+    f"#### {total_percent} Percent of Continental US Area Protected"
     st.altair_chart(area_chart, use_container_width=True)
 
 # -
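For context on the new app.py lines: total_percent is just the column sum of the summary table, and the f-string renders it as a Markdown heading. A minimal sketch, assuming summary_table() returns a pandas DataFrame with a percent_protected column holding one row per category; the sample values and the :.1f format spec are illustrative additions, not part of the commit.

import pandas as pd

# Hypothetical stand-in for summary_table(column, colors); the numbers are made up.
df = pd.DataFrame({"bucket": ["GAP 1-2", "GAP 3"], "percent_protected": [12.3, 18.9]})

total_percent = df.percent_protected.sum()  # 31.2, a plain float
# A format spec avoids printing full float precision in the heading.
headline = f"#### {total_percent:.1f} Percent of Continental US Area Protected"

In the app, the bare f-string literal works because Streamlit's "magic" renders standalone string expressions as Markdown, which is why the diff can drop it directly inside with col1:.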
chatmap.py
CHANGED
@@ -1,7 +1,16 @@
 import streamlit as st
 from openai import OpenAI
+import duckdb
 
-
+import duckdb
+duckdb.install_extension("spatial")
+duckdb.load_extension("spatial")
+duckdb.install_extension("httpfs")
+duckdb.load_extension("httpfs")
+
+duckdb.sql("create or replace view pad as select * from read_parquet('https://data.source.coop/cboettig/pad-us-3/pad-mobi.parquet')")
+
+st.title("ChatGPT SQL Assistant")
 
 # Set OpenAI API key from Streamlit secrets
 client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
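This first hunk wires DuckDB up front: the spatial and httpfs extensions are loaded and the PAD-US parquet on source.coop is registered as a view named pad, so the rest of the app can run plain SQL against it over HTTP. A minimal sketch of using that view on its own; the count query is only an example, not code from the commit.

import duckdb

duckdb.install_extension("httpfs")
duckdb.load_extension("httpfs")
duckdb.sql("create or replace view pad as select * from read_parquet('https://data.source.coop/cboettig/pad-us-3/pad-mobi.parquet')")

# Sanity check: the view behaves like a table backed by the remote parquet file.
print(duckdb.sql("SELECT count(*) FROM pad"))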
@@ -9,23 +18,64 @@ client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
 # Set a default model
 if "openai_model" not in st.session_state:
     st.session_state["openai_model"] = "gpt-3.5-turbo"
+    # "gpt-4"
 
 # Initialize chat history
 if "messages" not in st.session_state:
     st.session_state.messages = []
 
+
+setup = '''
+You are a database administrator, and expert in SQL. You will be helping me write complex SQL queries. I will explain you my needs, you will generate SQL queries against my database.
+
+My application does: Conservation prioritization of protected areas to help meet US 30x30 conservation goals.
+
+Please reply only with the SQL code that I will need to execute. Do not include an explanation of the code.
+
+The database is a POSTGIS Postgres database, please take it into consideration when generating PLSQL/SQL. Please avoid ST_Within queries if possible, because they are so slow.
+
+I will provide you with a description of the structure of my tables. You must remember them and use them for generating SQL queries. Once you read them all, just answer OK, nothing else.
+
+Here are the tables :
+
+Table "pad"
+┌─────────────┬─────────────┬─────────┬─────────┬─────────┬─────────┐
+│ column_name │ column_type │ null    │ key     │ default │ extra   │
+│ varchar     │ varchar     │ varchar │ varchar │ varchar │ varchar │
+├─────────────┼─────────────┼─────────┼─────────┼─────────┼─────────┤
+│ FID         │ INTEGER     │ YES     │ NULL    │ NULL    │ NULL    │
+│ time        │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ rsr         │ DOUBLE      │ YES     │ NULL    │ NULL    │ NULL    │
+│ richness    │ DOUBLE      │ YES     │ NULL    │ NULL    │ NULL    │
+│ bucket      │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ FeatClass   │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ Mang_Name   │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ Mang_Type   │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ Des_Tp      │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ Pub_Access  │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ GAP_Sts     │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ IUCN_Cat    │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ Unit_Nm     │ VARCHAR     │ YES     │ NULL    │ NULL    │ NULL    │
+│ area        │ DOUBLE      │ YES     │ NULL    │ NULL    │ NULL    │
+│ geometry    │ BLOB        │ YES     │ NULL    │ NULL    │ NULL    │
+├─────────────┴─────────────┴─────────┴─────────┴─────────┴─────────┤
+└────────────────────────────────────────────────────────────────────┘
+'''
+
 # Display chat messages from history on app rerun
 for message in st.session_state.messages:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
 # Accept user input
-if prompt := st.chat_input("What is up?"):
+if prompt := st.chat_input("What is the total area in each GAP_Sts?"):
     # Add user message to chat history
+    st.session_state.messages.append({"role": "system", "content": setup})
+
     st.session_state.messages.append({"role": "user", "content": prompt})
     # Display user message in chat message container
-    with st.chat_message("user"):
-        st.markdown(prompt)
+    # with st.chat_message("user"):
+        #st.markdown(prompt)
 
     # Display assistant response in chat message container
     with st.chat_message("assistant"):
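The setup string added here is a system prompt: it tells the model to answer with bare SQL only and pastes in the pad schema, and further down in the same hunk it is appended to the message history ahead of each user prompt. The schema block matches DuckDB DESCRIBE output, so it could also be generated at runtime rather than hard-coded; a small sketch of that alternative, continuing from the snippet above (pad view already registered) and not something the commit itself does.

# Reuses the pad view created in the previous snippet.
print(duckdb.sql("DESCRIBE pad"))  # emits the same column_name / column_type listing pasted into setup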
@@ -38,5 +88,9 @@ if prompt := st.chat_input("What is up?"):
         stream=True,
     )
     response = st.write_stream(stream)
+    st.divider()
+    df = duckdb.sql(response).df()
+    st.table(df)
     st.session_state.messages.append({"role": "assistant", "content": response})
-
+
+
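The last hunk closes the loop: the streamed reply, which the system prompt constrains to bare SQL, is executed directly with duckdb.sql(response).df() and rendered as a table. A condensed sketch of that step, continuing from the snippets above, with a hard-coded stand-in for response; the fence-stripping line is a defensive extra I am assuming, not something the commit does.

# Stand-in for the model's reply to "What is the total area in each GAP_Sts?".
response = "SELECT GAP_Sts, SUM(area) AS total_area FROM pad GROUP BY GAP_Sts"

# Defensive: models sometimes wrap SQL in markdown fences even when told not to.
sql = response.strip().removeprefix("```sql").removesuffix("```").strip()

df = duckdb.sql(sql).df()  # in the app, st.table(df) renders this DataFrame
print(df)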
requirements.txt
CHANGED
@@ -5,4 +5,5 @@ streamlit
 leafmap
 ibis-framework[duckdb]
 altair
+openai
 