import numpy as np
import streamlit as st
import requests
import pydeck as pdk
import pandas as pd
import geopandas as gpd
import plotly.express as px
import folium
import webbrowser
from shapely.geometry import Point
from folium import plugins
from streamlit_folium import st_folium
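
# Streamlit app: validate an address against three geographic layers
# (socio-economic risk zones, flood zones, ZUS sensitive urban zones)
# and flag any matches on an interactive folium map.
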
def load_polygon(filepath):
    return gpd.read_file(filepath)


path = 'Z:/Shared/Axeria Shared/Pricing/Immopolis Pricing Review/DATA/'
# Initialize polygons if not already in session state (load once, reproject to WGS84 once)
if 'polygons' not in st.session_state:
    st.session_state.polygons = {
        "Polygon1": load_polygon(path + 'risk_zones.shp'),
        "Polygon2": load_polygon(path + 'Flooding/n_inondable_01_01for_s.shp'),
        "Polygon3": load_polygon(path + 'ZUS/ZUS_FRM_BDA09_L93.shp')
    }
    st.session_state.polygons["Polygon1"]['geometry'] = st.session_state.polygons["Polygon1"]['geometry'].to_crs(epsg=4326)
    st.session_state.polygons["Polygon2"]['geometry'] = st.session_state.polygons["Polygon2"]['geometry'].to_crs(epsg=4326)
    st.session_state.polygons["Polygon3"]['geometry'] = st.session_state.polygons["Polygon3"]['geometry'].to_crs(epsg=4326)
# Function to plot an interactive histogram
# fig = px.histogram(polygon_gdf['poverty'], nbins=20)
# st.plotly_chart(fig)
# fig = px.ecdf(polygon_gdf['poverty'])
# fig.show()
# 28% --> 5% of squares
#
# fig = px.ecdf(polygon_gdf['densite'])
# 9000 --> 5% of squares
# fig.show()
#
# Load geographical layers
# zus = gpd.read_file(path + 'ZUS/ZUS_FRM_BDA09_L93.shp')
# polygon_gdf = gpd.read_file(path + 'Geo_metropole/Filosofi2017_carreaux_nivNaturel_met.shp')
# polygon_gdf2 = gpd.read_file(path + 'Filosofi2017_carreaux_1km_shp/Filosofi2017_carreaux_1km_met.shp')
# polygon_gdf2['densite'] = polygon_gdf2['Ind']
# polygon_gdf2['poverty'] = polygon_gdf2['Men_pauv'] / polygon_gdf2['Men']
# polygon_gdf['tmaille'] = pd.to_numeric(polygon_gdf['tmaille'])
# polygon_gdf['tmaillem2'] = polygon_gdf['tmaille'] ** 2
# polygon_gdf['densite'] = 1000000 * polygon_gdf['Ind'] / polygon_gdf['tmaillem2']
# polygon_gdf['poverty'] = polygon_gdf['Men_pauv'] / polygon_gdf['Men']
# risk_zones2 = polygon_gdf2[polygon_gdf2.poverty >= 0.30]
# risk_zones2 = risk_zones2[risk_zones2.densite >= 7000]
# risk_zones2.to_file(filename=path + 'risk_zones2.shp', driver='ESRI Shapefile')
# risk_zones = gpd.read_file(filename=path + 'risk_zones.shp')
# flooding = gpd.read_file(path + 'Flooding/n_inondable_01_01for_s.shp')
#
# Flooding zones
# risk_zones = polygon_gdf[polygon_gdf.poverty >= 0.28]
# risk_zones = risk_zones[risk_zones.densite >= 7000]
#
# risk_zones.to_file(filename=path + 'risk_zones.shp', driver='ESRI Shapefile')
#
# m = folium.Map(location=[48.885805, 2.366191], zoom_start=6)
# folium.GeoJson(Polygon2[Polygon2.index < 1000], color='blue').add_to(m)
# folium.CircleMarker([48.885805, 2.366191], radius=1, color='red').add_to(m)
# folium.GeoJson(flooding[flo,color='yellow').add_to(m)
# folium.GeoJson(risk_zones2, color='orange').add_to(m)
# folium.GeoJson(zus).add_to(m)
#
# m.save(path + "map2.html")
# webbrowser.open_new_tab(path + "map2.html")
#
# policies = pd.read_pickle(path + "DB_immoplus.pkl")
# geometry = [Point(xy) for xy in zip(policies['longitude'], policies['latitude'])]
# policies_geo = gpd.GeoDataFrame(policies, geometry=geometry, crs="EPSG:4326")
#
# large_claims = policies_geo[policies_geo.Charge > 20000]
# large_claims = large_claims.dropna(subset=['latitude'])
#
# for arr in large_claims["geometry"]:
#     lat = arr.y
#     lon = arr.x
#     folium.CircleMarker([lat, lon], radius=1, color='red').add_to(m)
# m.save(path + "map2.html")
# webbrowser.open_new_tab(path + "map2.html")
#
# sum(risk_zones['tmaille']) / sum(polygon_gdf['tmaille']) * 100
# sum(risk_zones['Ind']) / sum(polygon_gdf['Ind']) * 100
#
# flooding["zone_inond_freq"] = 1
# zus['flag_ZUS'] = 1

def create_geodataframe(longitude, latitude):
    geometry = [Point(longitude, latitude)]
    gdf = gpd.GeoDataFrame(geometry=geometry, crs="EPSG:4326")
    return gdf


# Function to get address suggestions from the Autocomplete API
def get_address_suggestions(query):
    if not query:
        return []
    url = "https://api-adresse.data.gouv.fr/search/"
    params = {'q': query, 'autocomplete': 1, 'limit': 5}
    response = requests.get(url, params=params)
    if response.status_code == 200:
        data = response.json()
        suggestions = [{'label': feature['properties']['label'], 'coordinates': feature['geometry']['coordinates']}
                       for feature in data['features']]
        return suggestions
    else:
        return []
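
# Example result shape (illustrative values only):
#   get_address_suggestions("10 rue de Rivoli Paris")
#   -> [{'label': '10 Rue de Rivoli 75004 Paris', 'coordinates': [2.35, 48.86]}, ...]
# The API returns coordinates in [longitude, latitude] (GeoJSON order).
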
# Function to create a map (pydeck; currently not called from main)
def create_map(latitude, longitude):
    map_data = pd.DataFrame({
        'lat': [latitude],
        'lon': [longitude]
    })
    st.pydeck_chart(pdk.Deck(
        map_style='mapbox://styles/mapbox/light-v9',
        initial_view_state=pdk.ViewState(
            latitude=latitude,
            longitude=longitude,
            zoom=11,
            pitch=50,
        ),
        layers=[
            pdk.Layer(
                'ScatterplotLayer',
                data=map_data,
                get_position='[lon, lat]',
                get_color='[200, 30, 0, 160]',
                get_radius=200,
            ),
        ],
    ))
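
# Example (illustrative): create_map(48.8566, 2.3522) would render a pydeck map
# centred on Paris with a single red scatter point; the 'mapbox://' style may
# require a Mapbox token in the Streamlit configuration.
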
# Streamlit app layout
def main():
    st.title("Immopolis Address Validation App")

    # Session state to store the current suggestions
    if 'suggestions' not in st.session_state:
        st.session_state.suggestions = []

    # Text input for the address
    query = st.text_input("Enter your address", "", key="query")

    # Update suggestions when the query changes
    st.session_state.suggestions = get_address_suggestions(query)

    # Display autocomplete suggestions
    if query and st.session_state.suggestions:
        selected_suggestion = st.selectbox("Did you mean:", [s['label'] for s in st.session_state.suggestions],
                                           index=0, key="selected_suggestion")
    else:
        selected_suggestion = ""

    if selected_suggestion:
        selected_data = next((item for item in st.session_state.suggestions if item['label'] == selected_suggestion),
                             None)
        if selected_data and 'coordinates' in selected_data:
            longitude, latitude = selected_data['coordinates']
            st.write(f"Latitude: {latitude}, Longitude: {longitude}")
            # m = folium.Map(location=[longitude, latitude], zoom_start=6)
            gdf = create_geodataframe(longitude, latitude)
            gdf['geometry'] = gdf['geometry'].to_crs(epsg=4326)
            st.write(gdf)
            # polygon_name1 = gdf.within(st.session_state.polygons["Polygon1"])
            # st.write(polygon_name1[polygon_name1.isna()])
            # polygon_name2 = gdf.within(st.session_state.polygons["Polygon2"])
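            # Spatial joins: 'index_right' holds the index of the matching polygon in
            # each layer, or NaN when the address falls outside that layer (assumes the
            # point matches at most one polygon per layer).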
            polygon_name1 = gpd.sjoin(gdf, st.session_state.polygons["Polygon1"], how="left", predicate="within")['index_right']
            st.write(polygon_name1)
            polygon_name2 = gpd.sjoin(gdf, st.session_state.polygons["Polygon2"], how="left", predicate="within")['index_right']
            st.write(polygon_name2)
            polygon_name3 = gpd.sjoin(gdf, st.session_state.polygons["Polygon3"], how="left", predicate="within")['index_right']
            st.write(polygon_name3)
            # st.write(polygon_name2)
            # st.write(np.isnan(polygon_name2[0]))
            # latitude = 48.885805
            # longitude = 2.366191
            # polygon_name3 = gdf.within(st.session_state.polygons["Polygon3"])
            # folium.GeoJson(risk_zones).add_to(m)
            # gdf = policies_geo[1:1]
            # m = folium.Map(location=[longitude, latitude], zoom_start=10)
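            # Centre a folium map on the selected address, always draw the sensitive-area
            # layer, and highlight only the specific polygons that contain the point.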
            m = folium.Map(location=[latitude, longitude], zoom_start=15)
            folium.CircleMarker([latitude, longitude], radius=1, color='red').add_to(m)
            # folium.GeoJson(st.session_state.polygons["Polygon2"], color='blue').add_to(m)
            folium.GeoJson(st.session_state.polygons["Polygon3"], color='orange').add_to(m)
            if not np.isnan(polygon_name1[0]):
                folium.GeoJson(st.session_state.polygons["Polygon1"][st.session_state.polygons["Polygon1"].index == polygon_name1[0]], color='yellow').add_to(m)
            if not np.isnan(polygon_name2[0]):
                folium.GeoJson(st.session_state.polygons["Polygon2"][st.session_state.polygons["Polygon2"].index == polygon_name2[0]], color='blue').add_to(m)
            if not np.isnan(polygon_name3[0]):
                folium.GeoJson(st.session_state.polygons["Polygon3"][st.session_state.polygons["Polygon3"].index == polygon_name3[0]], color='orange').add_to(m)
            point = Point(longitude, latitude)
            st_folium(m, width=700, height=500)
            if not np.isnan(polygon_name1[0]):
                st.markdown("Address is in a high-risk zone")
            if not np.isnan(polygon_name2[0]):
                st.markdown("Address is in a flooding area")
            if not np.isnan(polygon_name3[0]):
                st.markdown("Address is in a sensitive area")
            if np.isnan(polygon_name1[0]) and np.isnan(polygon_name2[0]) and np.isnan(polygon_name3[0]):
                st.markdown("Risk check OK")

# Run the app
if __name__ == "__main__":
    main()
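
# To run locally (assumption: this file is saved as app.py):
#   streamlit run app.py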