Spaces:
Running
Running
update app.py
Browse files
app.py
CHANGED
@@ -143,32 +143,30 @@ def calculate_custom_formula(image, geometry, selected_bands, custom_formula, re
|
|
143 |
return ee.Image(0).rename('custom_result').set('error', str(e))
|
144 |
|
145 |
# Aggregation functions
|
146 |
-
def
|
147 |
-
collection = collection.map(lambda image: image.set('
|
148 |
-
grouped_by_day = collection.aggregate_array('
|
149 |
-
def calculate_daily_mean(
|
150 |
-
daily_collection = collection.filter(ee.Filter.eq('
|
151 |
daily_mean = daily_collection.mean()
|
152 |
-
return daily_mean.set('
|
153 |
daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
|
154 |
return ee.ImageCollection(daily_images)
|
155 |
|
156 |
-
def aggregate_data_weekly(collection
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
|
|
|
|
|
|
162 |
def calculate_weekly_mean(week_start):
|
163 |
-
|
164 |
-
weekly_collection = collection.filterDate(ee.Date(week_start), week_end)
|
165 |
weekly_mean = weekly_collection.mean()
|
166 |
-
return weekly_mean.set(
|
167 |
-
|
168 |
-
'system:time_start': week_start.millis()
|
169 |
-
})
|
170 |
-
|
171 |
-
weekly_images = week_starts.map(calculate_weekly_mean)
|
172 |
return ee.ImageCollection(weekly_images)
|
173 |
|
174 |
def aggregate_data_monthly(collection, start_date, end_date):
|
@@ -192,18 +190,8 @@ def aggregate_data_yearly(collection):
|
|
192 |
yearly_images = ee.List(grouped_by_year.map(calculate_yearly_mean))
|
193 |
return ee.ImageCollection(yearly_images)
|
194 |
|
195 |
-
def aggregate_data_custom(collection):
    """Collapse an ImageCollection into one mean composite per calendar day.

    Every image is tagged with a 'day' property (YYYY-MM-dd, derived from its
    'system:time_start'); images sharing the same day are averaged into a
    single image that retains that 'day' property.

    Args:
        collection: ee.ImageCollection whose images carry 'system:time_start'.

    Returns:
        ee.ImageCollection with one mean image per distinct day.
    """
    # Tag each image with the calendar day it was acquired on (server-side).
    collection = collection.map(lambda image: image.set('day', ee.Date(image.get('system:time_start')).format('YYYY-MM-dd')))
    # Server-side list of the distinct day labels present in the collection.
    distinct_days = collection.aggregate_array('day').distinct()

    def mean_for_day(day):
        # Average every image that falls on this day; keep the day label.
        same_day = collection.filter(ee.Filter.eq('day', day))
        return same_day.mean().set('day', day)

    composites = ee.List(distinct_days.map(mean_for_day))
    return ee.ImageCollection(composites)
|
204 |
-
|
205 |
# Worker function for processing a single geometry
|
206 |
-
def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula, kernel_size=None, include_boundary=None):
|
207 |
if shape_type.lower() == "point":
|
208 |
latitude = row.get('latitude')
|
209 |
longitude = row.get('longitude')
|
@@ -233,50 +221,43 @@ def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selec
|
|
233 |
.filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
|
234 |
.filterBounds(roi)
|
235 |
|
236 |
-
if aggregation_period.lower() == '
|
237 |
-
collection =
|
238 |
elif aggregation_period.lower() == 'weekly':
|
239 |
-
collection = aggregate_data_weekly(collection
|
240 |
elif aggregation_period.lower() == 'monthly':
|
241 |
collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
|
242 |
elif aggregation_period.lower() == 'yearly':
|
243 |
collection = aggregate_data_yearly(collection)
|
244 |
-
elif aggregation_period.lower() == 'custom (start date to end date)':
|
245 |
-
collection = aggregate_data_custom(collection)
|
246 |
|
247 |
# Process each image in the collection
|
248 |
image_list = collection.toList(collection.size())
|
249 |
-
|
250 |
aggregated_results = []
|
251 |
|
252 |
for i in range(image_list.size().getInfo()):
|
253 |
image = ee.Image(image_list.get(i))
|
254 |
-
|
255 |
-
|
256 |
-
timestamp = image.get('date')
|
257 |
period_label = 'Date'
|
258 |
-
date = ee.
|
259 |
elif aggregation_period.lower() == 'weekly':
|
260 |
timestamp = image.get('week_start')
|
261 |
period_label = 'Week'
|
262 |
date = ee.String(timestamp).getInfo()
|
263 |
if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
|
264 |
pd.to_datetime(date) > pd.to_datetime(end_date_str) or
|
265 |
-
date in
|
266 |
continue
|
267 |
-
|
268 |
elif aggregation_period.lower() == 'monthly':
|
269 |
timestamp = image.get('month')
|
270 |
period_label = 'Month'
|
271 |
-
date = ee.
|
272 |
elif aggregation_period.lower() == 'yearly':
|
273 |
timestamp = image.get('year')
|
274 |
period_label = 'Year'
|
275 |
-
date = ee.
|
276 |
-
elif aggregation_period.lower() == 'custom (start date to end date)':
|
277 |
-
timestamp = image.get('day')
|
278 |
-
period_label = 'Date'
|
279 |
-
date = ee.String(timestamp).getInfo()
|
280 |
|
281 |
index_image = calculate_custom_formula(image, roi, selected_bands, custom_formula, reducer_choice, scale=30)
|
282 |
try:
|
@@ -295,8 +276,8 @@ def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selec
|
|
295 |
'Calculated Value': calculated_value
|
296 |
}
|
297 |
if shape_type.lower() == 'point':
|
298 |
-
result[
|
299 |
-
result[
|
300 |
aggregated_results.append(result)
|
301 |
except Exception as e:
|
302 |
st.error(f"Error retrieving value for {location_name}: {e}")
|
@@ -304,13 +285,13 @@ def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selec
|
|
304 |
return aggregated_results
|
305 |
|
306 |
# Main processing function
|
307 |
-
def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula="", kernel_size=None, include_boundary=None):
|
308 |
aggregated_results = []
|
309 |
total_steps = len(locations_df)
|
310 |
progress_bar = st.progress(0)
|
311 |
progress_text = st.empty()
|
312 |
|
313 |
-
start_time = time.time()
|
314 |
with ThreadPoolExecutor(max_workers=10) as executor:
|
315 |
futures = []
|
316 |
for idx, row in locations_df.iterrows():
|
@@ -325,6 +306,8 @@ def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id,
|
|
325 |
shape_type,
|
326 |
aggregation_period,
|
327 |
custom_formula,
|
|
|
|
|
328 |
kernel_size,
|
329 |
include_boundary
|
330 |
)
|
@@ -340,8 +323,9 @@ def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id,
|
|
340 |
progress_bar.progress(progress_percentage)
|
341 |
progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
|
342 |
|
|
|
343 |
end_time = time.time()
|
344 |
-
processing_time = end_time - start_time
|
345 |
|
346 |
if aggregated_results:
|
347 |
result_df = pd.DataFrame(aggregated_results)
|
@@ -352,8 +336,8 @@ def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id,
|
|
352 |
'Calculated Value': 'mean'
|
353 |
}
|
354 |
if shape_type.lower() == 'point':
|
355 |
-
agg_dict[
|
356 |
-
agg_dict[
|
357 |
aggregated_output = result_df.groupby('Location Name').agg(agg_dict).reset_index()
|
358 |
aggregated_output.rename(columns={'Calculated Value': 'Aggregated Value'}, inplace=True)
|
359 |
return aggregated_output.to_dict(orient='records'), processing_time
|
@@ -494,8 +478,8 @@ start_date_str = start_date.strftime('%Y-%m-%d')
|
|
494 |
end_date_str = end_date.strftime('%Y-%m-%d')
|
495 |
|
496 |
aggregation_period = st.selectbox(
|
497 |
-
"Select Aggregation Period",
|
498 |
-
["
|
499 |
index=0
|
500 |
)
|
501 |
|
@@ -520,15 +504,60 @@ elif shape_type.lower() == "polygon":
|
|
520 |
|
521 |
file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
|
522 |
locations_df = pd.DataFrame()
|
523 |
-
|
524 |
-
|
525 |
|
526 |
if file_upload is not None:
|
527 |
if shape_type.lower() == "point":
|
528 |
if file_upload.name.endswith('.csv'):
|
|
|
529 |
locations_df = pd.read_csv(file_upload)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
530 |
elif file_upload.name.endswith('.geojson'):
|
531 |
locations_df = gpd.read_file(file_upload)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
532 |
elif file_upload.name.endswith('.kml'):
|
533 |
kml_string = file_upload.read().decode('utf-8')
|
534 |
try:
|
@@ -543,39 +572,38 @@ if file_upload is not None:
|
|
543 |
coords = [c.strip() for c in coords_text.split(',')]
|
544 |
if len(coords) >= 2:
|
545 |
lon, lat = float(coords[0]), float(coords[1])
|
546 |
-
points.append({'name': name, '
|
547 |
if not points:
|
548 |
st.error("No valid Point data found in the KML file.")
|
549 |
-
locations_df = pd.DataFrame()
|
550 |
else:
|
551 |
-
locations_df =
|
|
|
|
|
|
|
|
|
552 |
except Exception as e:
|
553 |
st.error(f"Error parsing KML file: {str(e)}")
|
554 |
-
|
555 |
-
|
556 |
-
|
557 |
-
|
558 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
559 |
|
560 |
-
if len(numeric_columns) >= 2:
|
561 |
-
col1, col2 = st.columns(2)
|
562 |
-
with col1:
|
563 |
-
lat_col = st.selectbox("Select Latitude Column", numeric_columns,
|
564 |
-
index=numeric_columns.index('latitude') if 'latitude' in numeric_columns else 0)
|
565 |
-
with col2:
|
566 |
-
lon_col = st.selectbox("Select Longitude Column", numeric_columns,
|
567 |
-
index=numeric_columns.index('longitude') if 'longitude' in numeric_columns else 1)
|
568 |
-
|
569 |
-
# Create geometry column from selected lat/lon columns
|
570 |
-
locations_df['geometry'] = gpd.points_from_xy(locations_df[lon_col], locations_df[lat_col])
|
571 |
-
else:
|
572 |
-
st.error("Not enough numeric columns found for latitude/longitude selection.")
|
573 |
-
|
574 |
elif shape_type.lower() == "polygon":
|
575 |
if file_upload.name.endswith('.csv'):
|
576 |
-
|
577 |
elif file_upload.name.endswith('.geojson'):
|
578 |
locations_df = gpd.read_file(file_upload)
|
|
|
|
|
|
|
579 |
elif file_upload.name.endswith('.kml'):
|
580 |
kml_string = file_upload.read().decode('utf-8')
|
581 |
try:
|
@@ -593,42 +621,23 @@ if file_upload is not None:
|
|
593 |
polygons.append({'name': name, 'geometry': f"POLYGON (({coords_str}))"})
|
594 |
if not polygons:
|
595 |
st.error("No valid Polygon data found in the KML file.")
|
596 |
-
locations_df = pd.DataFrame()
|
597 |
else:
|
598 |
locations_df = gpd.GeoDataFrame(polygons, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in polygons]), crs="EPSG:4326")
|
599 |
except Exception as e:
|
600 |
st.error(f"Error parsing KML file: {str(e)}")
|
601 |
-
|
602 |
-
|
603 |
-
|
604 |
-
|
605 |
-
|
606 |
-
|
607 |
-
|
608 |
-
|
609 |
-
|
610 |
-
|
611 |
-
|
612 |
-
|
613 |
-
|
614 |
-
latitude = row['latitude']
|
615 |
-
longitude = row['longitude']
|
616 |
-
if pd.isna(latitude) or pd.isna(longitude):
|
617 |
-
continue
|
618 |
-
m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
|
619 |
-
st.write("Map of Uploaded Points:")
|
620 |
-
m.to_streamlit()
|
621 |
-
elif shape_type.lower() == "polygon":
|
622 |
-
centroid_lat = locations_df.geometry.centroid.y.mean()
|
623 |
-
centroid_lon = locations_df.geometry.centroid.x.mean()
|
624 |
-
m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
|
625 |
-
for _, row in locations_df.iterrows():
|
626 |
-
polygon = row['geometry']
|
627 |
-
if polygon.is_valid:
|
628 |
-
gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
|
629 |
-
m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
|
630 |
-
st.write("Map of Uploaded Polygons:")
|
631 |
-
m.to_streamlit()
|
632 |
|
633 |
if st.button(f"Calculate {custom_formula}"):
|
634 |
if not locations_df.empty:
|
@@ -643,6 +652,8 @@ if st.button(f"Calculate {custom_formula}"):
|
|
643 |
reducer_choice,
|
644 |
shape_type,
|
645 |
aggregation_period,
|
|
|
|
|
646 |
custom_formula,
|
647 |
kernel_size,
|
648 |
include_boundary
|
@@ -669,4 +680,4 @@ if st.button(f"Calculate {custom_formula}"):
|
|
669 |
except Exception as e:
|
670 |
st.error(f"An error occurred during processing: {str(e)}")
|
671 |
else:
|
672 |
-
st.warning("Please upload a file to proceed.")
|
|
|
143 |
return ee.Image(0).rename('custom_result').set('error', str(e))
|
144 |
|
145 |
# Aggregation functions
|
146 |
+
def aggregate_data_custom(collection):
    """Build daily mean composites from an ImageCollection.

    Each image gets a 'day' property (YYYY-MM-dd from 'system:time_start');
    all images sharing a day are reduced to their mean, and the resulting
    composite keeps the 'day' property.

    Args:
        collection: ee.ImageCollection whose images carry 'system:time_start'.

    Returns:
        ee.ImageCollection containing one mean image per distinct day.
    """
    def tag_day(image):
        # Label the image with its acquisition day (server-side format call).
        return image.set('day', ee.Date(image.get('system:time_start')).format('YYYY-MM-dd'))

    labeled = collection.map(tag_day)
    days = labeled.aggregate_array('day').distinct()

    def daily_composite(day):
        # Mean of every image on this day; the label survives on the composite.
        return labeled.filter(ee.Filter.eq('day', day)).mean().set('day', day)

    return ee.ImageCollection(ee.List(days.map(daily_composite)))
|
155 |
|
156 |
+
def aggregate_data_weekly(collection):
    """Build weekly mean composites from an ImageCollection.

    Each image is tagged with a 'week_start' property (YYYY-MM-dd — its
    acquisition date rolled back by ee.Date.getRelative('day', 'week') days,
    i.e. the first day of that week); images sharing a week start are
    averaged into one composite that retains the 'week_start' property.

    Args:
        collection: ee.ImageCollection whose images carry 'system:time_start'.

    Returns:
        ee.ImageCollection with one mean image per distinct week start.
    """
    def tag_week_start(image):
        # Roll the acquisition date back to the first day of its week.
        acquired = ee.Date(image.get('system:time_start'))
        days_into_week = acquired.getRelative('day', 'week')
        start_of_week = acquired.advance(ee.Number(days_into_week).multiply(-1), 'day')
        return image.set('week_start', start_of_week.format('YYYY-MM-dd'))

    collection = collection.map(tag_week_start)
    # Server-side list of the distinct week-start labels in the collection.
    distinct_weeks = collection.aggregate_array('week_start').distinct()

    def weekly_composite(week_start):
        # Mean of every image in this week; keep the week-start label.
        same_week = collection.filter(ee.Filter.eq('week_start', week_start))
        return same_week.mean().set('week_start', week_start)

    return ee.ImageCollection(ee.List(distinct_weeks.map(weekly_composite)))
|
171 |
|
172 |
def aggregate_data_monthly(collection, start_date, end_date):
|
|
|
190 |
yearly_images = ee.List(grouped_by_year.map(calculate_yearly_mean))
|
191 |
return ee.ImageCollection(yearly_images)
|
192 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
193 |
# Worker function for processing a single geometry
|
194 |
+
def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula, original_lat_col, original_lon_col, kernel_size=None, include_boundary=None):
|
195 |
if shape_type.lower() == "point":
|
196 |
latitude = row.get('latitude')
|
197 |
longitude = row.get('longitude')
|
|
|
221 |
.filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
|
222 |
.filterBounds(roi)
|
223 |
|
224 |
+
if aggregation_period.lower() == 'custom (start date to end date)':
|
225 |
+
collection = aggregate_data_custom(collection)
|
226 |
elif aggregation_period.lower() == 'weekly':
|
227 |
+
collection = aggregate_data_weekly(collection)
|
228 |
elif aggregation_period.lower() == 'monthly':
|
229 |
collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
|
230 |
elif aggregation_period.lower() == 'yearly':
|
231 |
collection = aggregate_data_yearly(collection)
|
|
|
|
|
232 |
|
233 |
# Process each image in the collection
|
234 |
image_list = collection.toList(collection.size())
|
235 |
+
processed_weeks = set()
|
236 |
aggregated_results = []
|
237 |
|
238 |
for i in range(image_list.size().getInfo()):
|
239 |
image = ee.Image(image_list.get(i))
|
240 |
+
if aggregation_period.lower() == 'custom (start date to end date)':
|
241 |
+
timestamp = image.get('day')
|
|
|
242 |
period_label = 'Date'
|
243 |
+
date = ee.Date(timestamp).format('YYYY-MM-dd').getInfo()
|
244 |
elif aggregation_period.lower() == 'weekly':
|
245 |
timestamp = image.get('week_start')
|
246 |
period_label = 'Week'
|
247 |
date = ee.String(timestamp).getInfo()
|
248 |
if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
|
249 |
pd.to_datetime(date) > pd.to_datetime(end_date_str) or
|
250 |
+
date in processed_weeks):
|
251 |
continue
|
252 |
+
processed_weeks.add(date)
|
253 |
elif aggregation_period.lower() == 'monthly':
|
254 |
timestamp = image.get('month')
|
255 |
period_label = 'Month'
|
256 |
+
date = ee.Date(timestamp).format('YYYY-MM').getInfo()
|
257 |
elif aggregation_period.lower() == 'yearly':
|
258 |
timestamp = image.get('year')
|
259 |
period_label = 'Year'
|
260 |
+
date = ee.Date(timestamp).format('YYYY').getInfo()
|
|
|
|
|
|
|
|
|
261 |
|
262 |
index_image = calculate_custom_formula(image, roi, selected_bands, custom_formula, reducer_choice, scale=30)
|
263 |
try:
|
|
|
276 |
'Calculated Value': calculated_value
|
277 |
}
|
278 |
if shape_type.lower() == 'point':
|
279 |
+
result[original_lat_col] = latitude # Use original column name
|
280 |
+
result[original_lon_col] = longitude # Use original column name
|
281 |
aggregated_results.append(result)
|
282 |
except Exception as e:
|
283 |
st.error(f"Error retrieving value for {location_name}: {e}")
|
|
|
285 |
return aggregated_results
|
286 |
|
287 |
# Main processing function
|
288 |
+
def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, original_lat_col, original_lon_col, custom_formula="", kernel_size=None, include_boundary=None):
|
289 |
aggregated_results = []
|
290 |
total_steps = len(locations_df)
|
291 |
progress_bar = st.progress(0)
|
292 |
progress_text = st.empty()
|
293 |
|
294 |
+
start_time = time.time() # Start timing the process
|
295 |
with ThreadPoolExecutor(max_workers=10) as executor:
|
296 |
futures = []
|
297 |
for idx, row in locations_df.iterrows():
|
|
|
306 |
shape_type,
|
307 |
aggregation_period,
|
308 |
custom_formula,
|
309 |
+
original_lat_col,
|
310 |
+
original_lon_col,
|
311 |
kernel_size,
|
312 |
include_boundary
|
313 |
)
|
|
|
323 |
progress_bar.progress(progress_percentage)
|
324 |
progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
|
325 |
|
326 |
+
# End timing the process
|
327 |
end_time = time.time()
|
328 |
+
processing_time = end_time - start_time # Calculate total processing time
|
329 |
|
330 |
if aggregated_results:
|
331 |
result_df = pd.DataFrame(aggregated_results)
|
|
|
336 |
'Calculated Value': 'mean'
|
337 |
}
|
338 |
if shape_type.lower() == 'point':
|
339 |
+
agg_dict[original_lat_col] = 'first'
|
340 |
+
agg_dict[original_lon_col] = 'first'
|
341 |
aggregated_output = result_df.groupby('Location Name').agg(agg_dict).reset_index()
|
342 |
aggregated_output.rename(columns={'Calculated Value': 'Aggregated Value'}, inplace=True)
|
343 |
return aggregated_output.to_dict(orient='records'), processing_time
|
|
|
478 |
end_date_str = end_date.strftime('%Y-%m-%d')
|
479 |
|
480 |
aggregation_period = st.selectbox(
|
481 |
+
"Select Aggregation Period (e.g, Custom(Start Date to End Date) , Weekly , Monthly , Yearly)",
|
482 |
+
["Custom (Start Date to End Date)", "Weekly", "Monthly", "Yearly"],
|
483 |
index=0
|
484 |
)
|
485 |
|
|
|
504 |
|
505 |
file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
|
506 |
locations_df = pd.DataFrame()
|
507 |
+
original_lat_col = None
|
508 |
+
original_lon_col = None
|
509 |
|
510 |
if file_upload is not None:
|
511 |
if shape_type.lower() == "point":
|
512 |
if file_upload.name.endswith('.csv'):
|
513 |
+
# Read the CSV file
|
514 |
locations_df = pd.read_csv(file_upload)
|
515 |
+
|
516 |
+
# Show the first few rows to help user identify columns
|
517 |
+
st.write("Preview of your uploaded data (first 5 rows):")
|
518 |
+
st.dataframe(locations_df.head())
|
519 |
+
|
520 |
+
# Get all column names from the uploaded file
|
521 |
+
all_columns = locations_df.columns.tolist()
|
522 |
+
|
523 |
+
# Let user select latitude and longitude columns from dropdown
|
524 |
+
col1, col2 = st.columns(2)
|
525 |
+
with col1:
|
526 |
+
original_lat_col = st.selectbox(
|
527 |
+
"Select Latitude Column",
|
528 |
+
options=all_columns,
|
529 |
+
index=all_columns.index('latitude') if 'latitude' in all_columns else 0,
|
530 |
+
help="Select the column containing latitude values"
|
531 |
+
)
|
532 |
+
with col2:
|
533 |
+
original_lon_col = st.selectbox(
|
534 |
+
"Select Longitude Column",
|
535 |
+
options=all_columns,
|
536 |
+
index=all_columns.index('longitude') if 'longitude' in all_columns else 0,
|
537 |
+
help="Select the column containing longitude values"
|
538 |
+
)
|
539 |
+
|
540 |
+
# Validate the selected columns contain numeric data
|
541 |
+
if not pd.api.types.is_numeric_dtype(locations_df[original_lat_col]) or not pd.api.types.is_numeric_dtype(locations_df[original_lon_col]):
|
542 |
+
st.error("Error: Selected Latitude and Longitude columns must contain numeric values")
|
543 |
+
st.stop()
|
544 |
+
|
545 |
+
# Rename the selected columns to standard names for processing
|
546 |
+
locations_df = locations_df.rename(columns={
|
547 |
+
original_lat_col: 'latitude',
|
548 |
+
original_lon_col: 'longitude'
|
549 |
+
})
|
550 |
+
|
551 |
elif file_upload.name.endswith('.geojson'):
|
552 |
locations_df = gpd.read_file(file_upload)
|
553 |
+
if 'geometry' in locations_df.columns:
|
554 |
+
locations_df['latitude'] = locations_df['geometry'].y
|
555 |
+
locations_df['longitude'] = locations_df['geometry'].x
|
556 |
+
original_lat_col = 'latitude'
|
557 |
+
original_lon_col = 'longitude'
|
558 |
+
else:
|
559 |
+
st.error("GeoJSON file doesn't contain geometry column")
|
560 |
+
st.stop()
|
561 |
elif file_upload.name.endswith('.kml'):
|
562 |
kml_string = file_upload.read().decode('utf-8')
|
563 |
try:
|
|
|
572 |
coords = [c.strip() for c in coords_text.split(',')]
|
573 |
if len(coords) >= 2:
|
574 |
lon, lat = float(coords[0]), float(coords[1])
|
575 |
+
points.append({'name': name, 'geometry': f"POINT ({lon} {lat})"})
|
576 |
if not points:
|
577 |
st.error("No valid Point data found in the KML file.")
|
|
|
578 |
else:
|
579 |
+
locations_df = gpd.GeoDataFrame(points, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in points]), crs="EPSG:4326")
|
580 |
+
locations_df['latitude'] = locations_df['geometry'].y
|
581 |
+
locations_df['longitude'] = locations_df['geometry'].x
|
582 |
+
original_lat_col = 'latitude'
|
583 |
+
original_lon_col = 'longitude'
|
584 |
except Exception as e:
|
585 |
st.error(f"Error parsing KML file: {str(e)}")
|
586 |
+
|
587 |
+
# Display map for points if we have valid data
|
588 |
+
if not locations_df.empty and 'latitude' in locations_df.columns and 'longitude' in locations_df.columns:
|
589 |
+
m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
|
590 |
+
for _, row in locations_df.iterrows():
|
591 |
+
latitude = row['latitude']
|
592 |
+
longitude = row['longitude']
|
593 |
+
if pd.isna(latitude) or pd.isna(longitude):
|
594 |
+
continue
|
595 |
+
m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
|
596 |
+
st.write("Map of Uploaded Points:")
|
597 |
+
m.to_streamlit()
|
598 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
599 |
elif shape_type.lower() == "polygon":
|
600 |
if file_upload.name.endswith('.csv'):
|
601 |
+
st.error("CSV upload not supported for polygons. Please upload a GeoJSON or KML file.")
|
602 |
elif file_upload.name.endswith('.geojson'):
|
603 |
locations_df = gpd.read_file(file_upload)
|
604 |
+
if 'geometry' not in locations_df.columns:
|
605 |
+
st.error("GeoJSON file doesn't contain geometry column")
|
606 |
+
st.stop()
|
607 |
elif file_upload.name.endswith('.kml'):
|
608 |
kml_string = file_upload.read().decode('utf-8')
|
609 |
try:
|
|
|
621 |
polygons.append({'name': name, 'geometry': f"POLYGON (({coords_str}))"})
|
622 |
if not polygons:
|
623 |
st.error("No valid Polygon data found in the KML file.")
|
|
|
624 |
else:
|
625 |
locations_df = gpd.GeoDataFrame(polygons, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in polygons]), crs="EPSG:4326")
|
626 |
except Exception as e:
|
627 |
st.error(f"Error parsing KML file: {str(e)}")
|
628 |
+
|
629 |
+
# Display map for polygons if we have valid data
|
630 |
+
if not locations_df.empty and 'geometry' in locations_df.columns:
|
631 |
+
centroid_lat = locations_df.geometry.centroid.y.mean()
|
632 |
+
centroid_lon = locations_df.geometry.centroid.x.mean()
|
633 |
+
m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
|
634 |
+
for _, row in locations_df.iterrows():
|
635 |
+
polygon = row['geometry']
|
636 |
+
if polygon.is_valid:
|
637 |
+
gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
|
638 |
+
m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
|
639 |
+
st.write("Map of Uploaded Polygons:")
|
640 |
+
m.to_streamlit()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
641 |
|
642 |
if st.button(f"Calculate {custom_formula}"):
|
643 |
if not locations_df.empty:
|
|
|
652 |
reducer_choice,
|
653 |
shape_type,
|
654 |
aggregation_period,
|
655 |
+
original_lat_col,
|
656 |
+
original_lon_col,
|
657 |
custom_formula,
|
658 |
kernel_size,
|
659 |
include_boundary
|
|
|
680 |
except Exception as e:
|
681 |
st.error(f"An error occurred during processing: {str(e)}")
|
682 |
else:
|
683 |
+
st.warning("Please upload a valid file to proceed.")
|