YashMK89 committed
Commit 23a1500 · verified · 1 Parent(s): 626c6f1

update app.py

Files changed (1)
  1. app.py +738 -738
app.py CHANGED
@@ -1,739 +1,739 @@
import streamlit as st
import json
import ee
import os
import pandas as pd
import geopandas as gpd
from datetime import datetime
import leafmap.foliumap as leafmap
import re
from shapely.geometry import base
from xml.etree import ElementTree as XET
from concurrent.futures import ThreadPoolExecutor, as_completed
import time
import matplotlib.pyplot as plt
import plotly.express as px
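# Note: `datetime` and `matplotlib.pyplot` are imported above but never used in
# this file; only pandas' to_datetime and plotly.express are exercised below.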

# Set up the page layout
st.set_page_config(layout="wide")

# Custom button styling
m = st.markdown(
    """
    <style>
    div.stButton > button:first-child {
        background-color: #006400;
        color: #ffffff;
    }
    </style>""",
    unsafe_allow_html=True,
)

# Logo and Title
st.write(
    f"""
    <div style="display: flex; justify-content: space-between; align-items: center;">
-       <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
-       <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
+       <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
+       <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
    </div>
    """,
    unsafe_allow_html=True,
)
st.markdown(
    f"""
    <div style="display: flex; flex-direction: column; align-items: center;">
-       <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SATRANG.png" style="width: 30%;">
+       <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/SATRANG.png" style="width: 30%;">
        <h3 style="text-align: center; margin: 0;">( Spatial and Temporal Aggregation for Remote-sensing Analysis of GEE Data )</h3>
    </div>
    <hr>
    """,
    unsafe_allow_html=True,
)

# Authenticate and initialize Earth Engine
earthengine_credentials = os.environ.get("EE_Authentication")
if not earthengine_credentials:
    # Without this guard, f.write(None) below would raise a TypeError.
    st.error("EE_Authentication environment variable is not set; cannot initialize Earth Engine.")
    st.stop()
os.makedirs(os.path.expanduser("~/.config/earthengine/"), exist_ok=True)
with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
    f.write(earthengine_credentials)
ee.Initialize(project='ee-yashsacisro24')

# Helper function to get reducer
def get_reducer(reducer_name):
    reducers = {
        'mean': ee.Reducer.mean(),
        'sum': ee.Reducer.sum(),
        'median': ee.Reducer.median(),
        'min': ee.Reducer.min(),
        'max': ee.Reducer.max(),
        'count': ee.Reducer.count(),
    }
    return reducers.get(reducer_name.lower(), ee.Reducer.mean())
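# Unrecognized names deliberately fall back to the mean reducer: for example,
# get_reducer('median') returns ee.Reducer.median(), while get_reducer('mode')
# (absent from the table above) silently returns ee.Reducer.mean().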

# Function to convert geometry to Earth Engine format
def convert_to_ee_geometry(geometry):
    if isinstance(geometry, base.BaseGeometry):
        if geometry.is_valid:
            geojson = geometry.__geo_interface__
            return ee.Geometry(geojson)
        else:
            raise ValueError("Invalid geometry: The polygon geometry is not valid.")
    # The KML-path check must precede the generic string branch below; otherwise
    # every string (including a '.kml' path) is treated as GeoJSON and this
    # branch can never run.
    elif isinstance(geometry, str) and geometry.lower().endswith(".kml"):
        try:
            tree = XET.parse(geometry)
            kml_root = tree.getroot()
            kml_namespace = {'kml': 'http://www.opengis.net/kml/2.2'}
            coordinates = kml_root.findall(".//kml:coordinates", kml_namespace)
            if coordinates:
                coords_text = coordinates[0].text.strip()
                coords = coords_text.split()
                coords = [tuple(map(float, coord.split(','))) for coord in coords]
                geojson = {"type": "Polygon", "coordinates": [coords]}
                return ee.Geometry(geojson)
            else:
                raise ValueError("KML does not contain valid coordinates.")
        except Exception as e:
            raise ValueError(f"Error parsing KML: {e}")
    elif isinstance(geometry, dict) or isinstance(geometry, str):
        try:
            if isinstance(geometry, str):
                geometry = json.loads(geometry)
            if 'type' in geometry and 'coordinates' in geometry:
                return ee.Geometry(geometry)
            else:
                raise ValueError("GeoJSON format is invalid.")
        except Exception as e:
            raise ValueError(f"Error parsing GeoJSON: {e}")
    else:
        raise ValueError("Unsupported geometry input type. Supported types are Shapely, GeoJSON, and KML.")

# Function to calculate custom formula
def calculate_custom_formula(image, geometry, selected_bands, custom_formula, reducer_choice, dataset_id, user_scale=None):
    try:
        # Determine the scale: use the user-defined scale if provided, otherwise the dataset's native resolution
        default_scale = ee.ImageCollection(dataset_id).first().select(0).projection().nominalScale().getInfo()
        scale = user_scale if user_scale is not None else default_scale
        band_values = {}
        band_names = image.bandNames().getInfo()
        for band in selected_bands:
            if band not in band_names:
                raise ValueError(f"Band '{band}' not found in the dataset.")
            band_values[band] = image.select(band)
        reducer = get_reducer(reducer_choice)
        reduced_values = {}
        for band in selected_bands:
            value = band_values[band].reduceRegion(
                reducer=reducer,
                geometry=geometry,
                scale=scale
            ).get(band).getInfo()
            reduced_values[band] = float(value if value is not None else 0)
        formula = custom_formula
        for band in selected_bands:
            formula = formula.replace(band, str(reduced_values[band]))
        result = eval(formula, {"__builtins__": {}}, reduced_values)
        if not isinstance(result, (int, float)):
            raise ValueError("Formula did not result in a numeric value.")
        return ee.Image.constant(result).rename('custom_result')
    except ZeroDivisionError:
        st.error("Error: Division by zero in the formula.")
        return ee.Image(0).rename('custom_result').set('error', 'Division by zero')
    except SyntaxError:
        st.error(f"Error: Invalid syntax in formula '{custom_formula}'.")
        return ee.Image(0).rename('custom_result').set('error', 'Invalid syntax')
    except ValueError as e:
        st.error(f"Error: {str(e)}")
        return ee.Image(0).rename('custom_result').set('error', str(e))
    except Exception as e:
        st.error(f"Unexpected error: {e}")
        return ee.Image(0).rename('custom_result').set('error', str(e))
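# Worked example (illustrative values only): with selected_bands = ['B8', 'B4']
# and reduced means B8 = 0.42, B4 = 0.11, the formula '(B8 - B4) / (B8 + B4)'
# is rewritten to '(0.42 - 0.11) / (0.42 + 0.11)' and eval() returns ~0.585,
# an NDVI-style index for the region. Caveat: the plain str.replace() assumes
# no band name is a substring of another (e.g. 'B8' would also be substituted
# inside 'B8A').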

# Aggregation functions
def aggregate_data_custom(collection):
    collection = collection.map(lambda image: image.set('day', ee.Date(image.get('system:time_start')).format('YYYY-MM-dd')))
    grouped_by_day = collection.aggregate_array('day').distinct()
    def calculate_daily_mean(day):
        daily_collection = collection.filter(ee.Filter.eq('day', day))
        daily_mean = daily_collection.mean()
        return daily_mean.set('day', day)
    daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
    return ee.ImageCollection(daily_images)

def aggregate_data_daily(collection):
    def set_day_start(image):
        date = ee.Date(image.get('system:time_start'))
        day_start = date.format('YYYY-MM-dd')
        return image.set('day_start', day_start)
    collection = collection.map(set_day_start)
    grouped_by_day = collection.aggregate_array('day_start').distinct()
    def calculate_daily_mean(day_start):
        daily_collection = collection.filter(ee.Filter.eq('day_start', day_start))
        daily_mean = daily_collection.mean()
        return daily_mean.set('day_start', day_start)
    daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
    return ee.ImageCollection(daily_images)

def aggregate_data_weekly(collection, start_date_str, end_date_str):
    start_date = ee.Date(start_date_str)
    end_date = ee.Date(end_date_str)
    days_diff = end_date.difference(start_date, 'day')
    num_weeks = int(days_diff.divide(7).ceil().getInfo())  # int() guards against getInfo() returning a float, which range() rejects
    weekly_images = []
    for week in range(num_weeks):
        week_start = start_date.advance(week * 7, 'day')
        week_end = week_start.advance(7, 'day')
        weekly_collection = collection.filterDate(week_start, week_end)
        if weekly_collection.size().getInfo() > 0:
            weekly_mean = weekly_collection.mean()
            weekly_mean = weekly_mean.set('week_start', week_start.format('YYYY-MM-dd'))
            weekly_images.append(weekly_mean)
    return ee.ImageCollection.fromImages(weekly_images)
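# Example (illustrative dates): for start 2024-11-01 and end 2024-12-01,
# days_diff = 30 and ceil(30 / 7) = 5 week bins are formed, starting 11-01,
# 11-08, 11-15, 11-22 and 11-29; the last bin extends past the end date and is
# trimmed later by the date check in process_single_geometry.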

def aggregate_data_monthly(collection, start_date, end_date):
    collection = collection.filterDate(start_date, end_date)
    collection = collection.map(lambda image: image.set('month', ee.Date(image.get('system:time_start')).format('YYYY-MM')))
    grouped_by_month = collection.aggregate_array('month').distinct()
    def calculate_monthly_mean(month):
        monthly_collection = collection.filter(ee.Filter.eq('month', month))
        monthly_mean = monthly_collection.mean()
        return monthly_mean.set('month', month)
    monthly_images = ee.List(grouped_by_month.map(calculate_monthly_mean))
    return ee.ImageCollection(monthly_images)

def aggregate_data_yearly(collection):
    collection = collection.map(lambda image: image.set('year', ee.Date(image.get('system:time_start')).format('YYYY')))
    grouped_by_year = collection.aggregate_array('year').distinct()
    def calculate_yearly_mean(year):
        yearly_collection = collection.filter(ee.Filter.eq('year', year))
        yearly_mean = yearly_collection.mean()
        return yearly_mean.set('year', year)
    yearly_images = ee.List(grouped_by_year.map(calculate_yearly_mean))
    return ee.ImageCollection(yearly_images)
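# The custom/daily/monthly/yearly aggregators above all follow one server-side
# pattern: stamp each image with a period label, collect the distinct labels,
# then mean-composite the images sharing each label. A minimal generic sketch
# of that pattern (the helper name and period_fmt parameter are illustrative
# and not used elsewhere in this app):
def _aggregate_by_period(collection, prop, period_fmt):
    collection = collection.map(
        lambda img: img.set(prop, ee.Date(img.get('system:time_start')).format(period_fmt)))
    labels = collection.aggregate_array(prop).distinct()
    images = labels.map(
        lambda label: collection.filter(ee.Filter.eq(prop, label)).mean().set(prop, label))
    return ee.ImageCollection(ee.List(images))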

def preprocess_collection(collection, tile_cloud_threshold, pixel_cloud_threshold):
    def filter_tile(image):
        cloud_percentage = calculate_cloud_percentage(image, cloud_band='QA60')
        return image.set('cloud_percentage', cloud_percentage).updateMask(cloud_percentage.lt(tile_cloud_threshold))
    def mask_cloudy_pixels(image):
        qa60 = image.select('QA60')
        opaque_clouds = qa60.bitwiseAnd(1 << 10)
        cirrus_clouds = qa60.bitwiseAnd(1 << 11)
        cloud_mask = opaque_clouds.Or(cirrus_clouds)
        clear_pixels = cloud_mask.Not()
        return image.updateMask(clear_pixels)
    filtered_collection = collection.map(filter_tile)
    masked_collection = filtered_collection.map(mask_cloudy_pixels)
    return masked_collection
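# calculate_cloud_percentage is called by filter_tile above but is not defined
# anywhere in this file, so enabling either cloud threshold raises a NameError.
# Below is a minimal sketch of one plausible implementation, assuming the
# Sentinel-2 QA60 band (bit 10 = opaque cloud, bit 11 = cirrus); the 1 km
# sampling scale is an illustrative choice. Note also that filter_tile masks
# the tile with a constant ee.Number comparison; filtering the collection with
# ee.Filter.lt('cloud_percentage', threshold) would be the more conventional
# way to drop cloudy tiles.
def calculate_cloud_percentage(image, cloud_band='QA60'):
    qa = image.select(cloud_band)
    cloudy = qa.bitwiseAnd(1 << 10).Or(qa.bitwiseAnd(1 << 11)).gt(0)
    fraction = cloudy.reduceRegion(
        reducer=ee.Reducer.mean(),  # mean of a 0/1 cloud mask = cloudy fraction
        geometry=image.geometry(),
        scale=1000,
        maxPixels=1e9
    ).get(cloud_band)
    return ee.Number(fraction).multiply(100)  # percent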

def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula, original_lat_col, original_lon_col, kernel_size=None, include_boundary=None, user_scale=None):
    if shape_type.lower() == "point":
        latitude = row.get('latitude')
        longitude = row.get('longitude')
        if pd.isna(latitude) or pd.isna(longitude):
            return None
        location_name = row.get('name', f"Location_{row.name}")
        if kernel_size == "3x3 Kernel":
            buffer_size = 45
            roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
        elif kernel_size == "5x5 Kernel":
            buffer_size = 75
            roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
        else:
            roi = ee.Geometry.Point([longitude, latitude])
    elif shape_type.lower() == "polygon":
        polygon_geometry = row.get('geometry')
        location_name = row.get('name', f"Polygon_{row.name}")
        try:
            roi = convert_to_ee_geometry(polygon_geometry)
            if not include_boundary:
                roi = roi.buffer(-30).bounds()
        except ValueError:
            return None
    collection = ee.ImageCollection(dataset_id) \
        .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
        .filterBounds(roi)
    if aggregation_period.lower() == 'custom (start date to end date)':
        collection = aggregate_data_custom(collection)
    elif aggregation_period.lower() == 'daily':
        collection = aggregate_data_daily(collection)
    elif aggregation_period.lower() == 'weekly':
        collection = aggregate_data_weekly(collection, start_date_str, end_date_str)
    elif aggregation_period.lower() == 'monthly':
        collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
    elif aggregation_period.lower() == 'yearly':
        collection = aggregate_data_yearly(collection)
    image_list = collection.toList(collection.size())
    processed_weeks = set()
    aggregated_results = []
    for i in range(image_list.size().getInfo()):
        image = ee.Image(image_list.get(i))
        if aggregation_period.lower() == 'custom (start date to end date)':
            timestamp = image.get('day')
            period_label = 'Date'
            date = ee.Date(timestamp).format('YYYY-MM-dd').getInfo()
        elif aggregation_period.lower() == 'daily':
            timestamp = image.get('day_start')
            period_label = 'Date'
            date = ee.String(timestamp).getInfo()
        elif aggregation_period.lower() == 'weekly':
            timestamp = image.get('week_start')
            period_label = 'Week'
            date = ee.String(timestamp).getInfo()
            if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
                    pd.to_datetime(date) > pd.to_datetime(end_date_str) or
                    date in processed_weeks):
                continue
            processed_weeks.add(date)
        elif aggregation_period.lower() == 'monthly':
            timestamp = image.get('month')
            period_label = 'Month'
            date = ee.Date(timestamp).format('YYYY-MM').getInfo()
        elif aggregation_period.lower() == 'yearly':
            timestamp = image.get('year')
            period_label = 'Year'
            date = ee.Date(timestamp).format('YYYY').getInfo()
        index_image = calculate_custom_formula(image, roi, selected_bands, custom_formula, reducer_choice, dataset_id, user_scale=user_scale)
        try:
            index_value = index_image.reduceRegion(
                reducer=get_reducer(reducer_choice),
                geometry=roi,
                scale=user_scale
            ).get('custom_result')
            calculated_value = index_value.getInfo()
            if isinstance(calculated_value, (int, float)):
                result = {
                    'Location Name': location_name,
                    period_label: date,
                    'Start Date': start_date_str,
                    'End Date': end_date_str,
                    'Calculated Value': calculated_value
                }
                if shape_type.lower() == 'point':
                    result[original_lat_col] = latitude
                    result[original_lon_col] = longitude
                aggregated_results.append(result)
        except Exception as e:
            st.error(f"Error retrieving value for {location_name}: {e}")
    return aggregated_results
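# Note on the kernel buffers above: the 45 m and 75 m half-widths produce
# ~90 m and ~150 m squares, i.e. 3x3 and 5x5 windows of 30 m pixels, so the
# sizes are tuned to ~30 m imagery; for datasets at other resolutions the
# "kernel" labels are only nominal.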

def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, original_lat_col, original_lon_col, custom_formula="", kernel_size=None, include_boundary=None, tile_cloud_threshold=0, pixel_cloud_threshold=0, user_scale=None):
    aggregated_results = []
    total_steps = len(locations_df)
    progress_bar = st.progress(0)
    progress_text = st.empty()
    start_time = time.time()
    raw_collection = ee.ImageCollection(dataset_id) \
        .filterDate(ee.Date(start_date_str), ee.Date(end_date_str))
    st.write(f"Original Collection Size: {raw_collection.size().getInfo()}")
    if tile_cloud_threshold > 0 or pixel_cloud_threshold > 0:
        raw_collection = preprocess_collection(raw_collection, tile_cloud_threshold, pixel_cloud_threshold)
        st.write(f"Preprocessed Collection Size: {raw_collection.size().getInfo()}")
    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = []
        for idx, row in locations_df.iterrows():
            future = executor.submit(
                process_single_geometry,
                row,
                start_date_str,
                end_date_str,
                dataset_id,
                selected_bands,
                reducer_choice,
                shape_type,
                aggregation_period,
                custom_formula,
                original_lat_col,
                original_lon_col,
                kernel_size,
                include_boundary,
                user_scale=user_scale
            )
            futures.append(future)
        completed = 0
        for future in as_completed(futures):
            result = future.result()
            if result:
                aggregated_results.extend(result)
            completed += 1
            progress_percentage = completed / total_steps
            progress_bar.progress(progress_percentage)
            progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
    end_time = time.time()
    processing_time = end_time - start_time
    if aggregated_results:
        result_df = pd.DataFrame(aggregated_results)
        if aggregation_period.lower() == 'custom (start date to end date)':
            agg_dict = {
                'Start Date': 'first',
                'End Date': 'first',
                'Calculated Value': 'mean'
            }
            if shape_type.lower() == 'point':
                agg_dict[original_lat_col] = 'first'
                agg_dict[original_lon_col] = 'first'
            aggregated_output = result_df.groupby('Location Name').agg(agg_dict).reset_index()
            aggregated_output['Date Range'] = aggregated_output['Start Date'] + " to " + aggregated_output['End Date']
            aggregated_output.rename(columns={'Calculated Value': 'Aggregated Value'}, inplace=True)
            return aggregated_output.to_dict(orient='records'), processing_time
        else:
            return result_df.to_dict(orient='records'), processing_time
    return [], processing_time
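# Two observations on the flow above (left as notes rather than behavior
# changes): (1) the preprocessed raw_collection is only reported, never passed
# to the workers; process_single_geometry rebuilds its own collection from
# dataset_id, so the cloud preprocessing does not affect the computed values.
# (2) each worker issues its own blocking getInfo() calls, so the thread pool
# mainly overlaps Earth Engine round-trip latency. An illustrative call (names
# and values are examples only):
#   results, secs = process_aggregation(
#       locations_df=df, start_date_str='2024-11-01', end_date_str='2024-12-01',
#       dataset_id='COPERNICUS/S2_SR_HARMONIZED', selected_bands=['B8', 'B4'],
#       reducer_choice='mean', shape_type='Point', aggregation_period='Monthly',
#       original_lat_col='lat', original_lon_col='lon',
#       custom_formula='(B8 - B4) / (B8 + B4)', user_scale=10)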

# Streamlit App Logic
st.markdown("<h5>Image Collection</h5>", unsafe_allow_html=True)
imagery_base = st.selectbox("Select Imagery Base", ["Sentinel", "Landsat", "MODIS", "VIIRS", "Custom Input"], index=0)
data = {}
# The four catalog bases share identical load logic, so a lookup table replaces
# four copies of the same try/except block.
dataset_files = {
    "Sentinel": "sentinel_datasets.json",
    "Landsat": "landsat_datasets.json",
    "MODIS": "modis_datasets.json",
    "VIIRS": "viirs_datasets.json",
}
if imagery_base in dataset_files:
    dataset_file = dataset_files[imagery_base]
    try:
        with open(dataset_file) as f:
            data = json.load(f)
    except FileNotFoundError:
        st.error(f"Dataset file '{dataset_file}' not found.")
        data = {}
elif imagery_base == "Custom Input":
    custom_dataset_id = st.text_input("Enter Custom Earth Engine Dataset ID (e.g., AHN/AHN4)", value="")
    if custom_dataset_id:
        try:
            if custom_dataset_id.startswith("ee.ImageCollection("):
                custom_dataset_id = custom_dataset_id.replace("ee.ImageCollection('", "").replace("')", "")
            collection = ee.ImageCollection(custom_dataset_id)
            band_names = collection.first().bandNames().getInfo()
            data = {
                f"Custom Dataset: {custom_dataset_id}": {
                    "sub_options": {custom_dataset_id: f"Custom Dataset ({custom_dataset_id})"},
                    "bands": {custom_dataset_id: band_names}
                }
            }
            st.write(f"Fetched bands for {custom_dataset_id}: {', '.join(band_names)}")
        except Exception as e:
            st.error(f"Error fetching dataset: {str(e)}. Please check the dataset ID and ensure it's valid in Google Earth Engine.")
            data = {}
    else:
        st.warning("Please enter a custom dataset ID to proceed.")
        data = {}
if not data:
    st.error("No valid dataset available. Please check your inputs.")
    st.stop()

st.markdown("<hr><h5><b>{}</b></h5>".format(imagery_base), unsafe_allow_html=True)
main_selection = st.selectbox(f"Select {imagery_base} Dataset Category", list(data.keys()))
sub_selection = None
dataset_id = None
if main_selection:
    sub_options = data[main_selection]["sub_options"]
    sub_selection = st.selectbox(f"Select Specific {imagery_base} Dataset ID", list(sub_options.keys()))
    if sub_selection:
        st.write(f"You selected: {main_selection} -> {sub_options[sub_selection]}")
        st.write(f"Dataset ID: {sub_selection}")
        dataset_id = sub_selection

st.markdown("<hr><h5><b>Earth Engine Index Calculator</b></h5>", unsafe_allow_html=True)
if main_selection and sub_selection:
    dataset_bands = data[main_selection]["bands"].get(sub_selection, [])
    st.write(f"Available Bands for {sub_options[sub_selection]}: {', '.join(dataset_bands)}")
    selected_bands = st.multiselect(
        "Select 1 or 2 Bands for Calculation",
        options=dataset_bands,
        default=[dataset_bands[0]] if dataset_bands else [],
        help=f"Select 1 or 2 bands from: {', '.join(dataset_bands)}"
    )
    if len(selected_bands) < 1:
        st.warning("Please select at least one band.")
        st.stop()
    if selected_bands:
        if len(selected_bands) == 1:
            default_formula = f"{selected_bands[0]}"
            example = f"'{selected_bands[0]} * 2' or '{selected_bands[0]} + 1'"
        else:
            default_formula = f"({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})"
            example = f"'{selected_bands[0]} * {selected_bands[1]} / 2' or '({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})'"
        custom_formula = st.text_input(
            "Enter Custom Formula (e.g. (B8 - B4) / (B8 + B4) or B4 * B3 / 2)",
            value=default_formula,
            help=f"Use only these bands: {', '.join(selected_bands)}. Examples: {example}"
        )
        def validate_formula(formula, selected_bands):
            allowed_chars = set(" +-*/()0123456789.")
            terms = re.findall(r'[a-zA-Z][a-zA-Z0-9_]*', formula)
            invalid_terms = [term for term in terms if term not in selected_bands]
            if invalid_terms:
                return False, f"Invalid terms in formula: {', '.join(invalid_terms)}. Use only {', '.join(selected_bands)}."
            if not all(char in allowed_chars or char in ''.join(selected_bands) for char in formula):
                return False, "Formula contains invalid characters. Use only bands, numbers, and operators (+, -, *, /, ())."
            return True, ""
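        # Illustrative checks: with selected_bands = ['B8', 'B4'],
        # validate_formula('(B8 - B4) / (B8 + B4)', selected_bands) returns
        # (True, ""), while validate_formula('B8 + NDVI', selected_bands)
        # rejects NDVI as an invalid term.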
        is_valid, error_message = validate_formula(custom_formula, selected_bands)
        if not is_valid:
            st.error(error_message)
            st.stop()
        elif not custom_formula:
            st.warning("Please enter a custom formula to proceed.")
            st.stop()
        st.write(f"Custom Formula: {custom_formula}")

reducer_choice = st.selectbox(
    "Select Reducer (e.g. mean, sum, median, min, max, count)",
    ['mean', 'sum', 'median', 'min', 'max', 'count'],
    index=0
)
start_date = st.date_input("Start Date", value=pd.to_datetime('2024-11-01'))
end_date = st.date_input("End Date", value=pd.to_datetime('2024-12-01'))
start_date_str = start_date.strftime('%Y-%m-%d')
end_date_str = end_date.strftime('%Y-%m-%d')

if imagery_base == "Sentinel" and "Sentinel-2" in sub_options[sub_selection]:
    st.markdown("<h5>Cloud Filtering</h5>", unsafe_allow_html=True)
    tile_cloud_threshold = st.slider(
        "Select Maximum Tile-Based Cloud Coverage Threshold (%)",
        min_value=0,
        max_value=100,
        value=20,
        step=5,
        help="Tiles with cloud coverage exceeding this threshold will be excluded."
    )
    pixel_cloud_threshold = st.slider(
        "Select Maximum Pixel-Based Cloud Coverage Threshold (%)",
        min_value=0,
        max_value=100,
        value=10,
        step=5,
        help="Individual pixels with cloud coverage exceeding this threshold will be masked."
    )

aggregation_period = st.selectbox(
    "Select Aggregation Period (e.g. Custom (Start Date to End Date), Daily, Weekly, Monthly, Yearly)",
    ["Custom (Start Date to End Date)", "Daily", "Weekly", "Monthly", "Yearly"],
    index=0
)
shape_type = st.selectbox("Do you want to process 'Point' or 'Polygon' data?", ["Point", "Polygon"])
kernel_size = None
include_boundary = None
if shape_type.lower() == "point":
    kernel_size = st.selectbox(
        "Select Calculation Area (e.g. Point, 3x3 Kernel, 5x5 Kernel)",
        ["Point", "3x3 Kernel", "5x5 Kernel"],
        index=0,
        help="Choose 'Point' for exact point calculation, or a kernel size for area averaging."
    )
elif shape_type.lower() == "polygon":
    include_boundary = st.checkbox(
        "Include Boundary Pixels",
        value=True,
        help="Check to include pixels on the polygon boundary; uncheck to exclude them."
    )

st.markdown("<h5>Calculation Scale</h5>", unsafe_allow_html=True)
default_scale = ee.ImageCollection(dataset_id).first().select(0).projection().nominalScale().getInfo()
user_scale = st.number_input(
    "Enter Calculation Scale (meters); prefilled with the dataset's default scale",
    min_value=1.0,
    value=float(default_scale),
    help=f"Default scale for this dataset is {default_scale} meters. Adjust if needed."
)

file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
locations_df = pd.DataFrame()
original_lat_col = None
original_lon_col = None
if file_upload is not None:
    if shape_type.lower() == "point":
        if file_upload.name.endswith('.csv'):
            locations_df = pd.read_csv(file_upload)
            st.write("Preview of your uploaded data (first 5 rows):")
            st.dataframe(locations_df.head())
            all_columns = locations_df.columns.tolist()
            col1, col2 = st.columns(2)
            with col1:
                original_lat_col = st.selectbox(
                    "Select Latitude Column",
                    options=all_columns,
                    index=all_columns.index('latitude') if 'latitude' in all_columns else 0,
                    help="Select the column containing latitude values"
                )
            with col2:
                original_lon_col = st.selectbox(
                    "Select Longitude Column",
                    options=all_columns,
                    index=all_columns.index('longitude') if 'longitude' in all_columns else 0,
                    help="Select the column containing longitude values"
                )
            if not pd.api.types.is_numeric_dtype(locations_df[original_lat_col]) or not pd.api.types.is_numeric_dtype(locations_df[original_lon_col]):
                st.error("Error: Selected Latitude and Longitude columns must contain numeric values.")
                st.stop()
            locations_df = locations_df.rename(columns={
                original_lat_col: 'latitude',
                original_lon_col: 'longitude'
            })
        elif file_upload.name.endswith('.geojson'):
            locations_df = gpd.read_file(file_upload)
            if 'geometry' in locations_df.columns:
                locations_df['latitude'] = locations_df['geometry'].y
                locations_df['longitude'] = locations_df['geometry'].x
                original_lat_col = 'latitude'
                original_lon_col = 'longitude'
            else:
                st.error("GeoJSON file doesn't contain a geometry column.")
                st.stop()
        elif file_upload.name.endswith('.kml'):
            kml_string = file_upload.read().decode('utf-8')
            try:
                root = XET.fromstring(kml_string)
                ns = {'kml': 'http://www.opengis.net/kml/2.2'}
                points = []
                for placemark in root.findall('.//kml:Placemark', ns):
                    name = placemark.findtext('kml:name', default=f"Point_{len(points)}", namespaces=ns)
                    coords_elem = placemark.find('.//kml:Point/kml:coordinates', ns)
                    if coords_elem is not None:
                        coords_text = coords_elem.text.strip()
                        coords = [c.strip() for c in coords_text.split(',')]
                        if len(coords) >= 2:
                            lon, lat = float(coords[0]), float(coords[1])
                            points.append({'name': name, 'geometry': f"POINT ({lon} {lat})"})
                if not points:
                    st.error("No valid Point data found in the KML file.")
                else:
                    locations_df = gpd.GeoDataFrame(points, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in points]), crs="EPSG:4326")
                    locations_df['latitude'] = locations_df['geometry'].y
                    locations_df['longitude'] = locations_df['geometry'].x
                    original_lat_col = 'latitude'
                    original_lon_col = 'longitude'
            except Exception as e:
                st.error(f"Error parsing KML file: {str(e)}")
        if not locations_df.empty and 'latitude' in locations_df.columns and 'longitude' in locations_df.columns:
            m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
            for _, row in locations_df.iterrows():
                latitude = row['latitude']
                longitude = row['longitude']
                if pd.isna(latitude) or pd.isna(longitude):
                    continue
                m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
            st.write("Map of Uploaded Points:")
            m.to_streamlit()
    elif shape_type.lower() == "polygon":
        if file_upload.name.endswith('.csv'):
            st.error("CSV upload not supported for polygons. Please upload a GeoJSON or KML file.")
        elif file_upload.name.endswith('.geojson'):
            locations_df = gpd.read_file(file_upload)
            if 'geometry' not in locations_df.columns:
                st.error("GeoJSON file doesn't contain a geometry column.")
                st.stop()
        elif file_upload.name.endswith('.kml'):
            kml_string = file_upload.read().decode('utf-8')
            try:
                root = XET.fromstring(kml_string)
                ns = {'kml': 'http://www.opengis.net/kml/2.2'}
                polygons = []
                for placemark in root.findall('.//kml:Placemark', ns):
                    name = placemark.findtext('kml:name', default=f"Polygon_{len(polygons)}", namespaces=ns)
                    coords_elem = placemark.find('.//kml:Polygon//kml:coordinates', ns)
                    if coords_elem is not None:
                        coords_text = ' '.join(coords_elem.text.split())
                        coord_pairs = [pair.split(',')[:2] for pair in coords_text.split() if pair]
                        if len(coord_pairs) >= 4:
                            coords_str = " ".join([f"{float(lon)} {float(lat)}" for lon, lat in coord_pairs])
                            polygons.append({'name': name, 'geometry': f"POLYGON (({coords_str}))"})
                if not polygons:
                    st.error("No valid Polygon data found in the KML file.")
                else:
                    locations_df = gpd.GeoDataFrame(polygons, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in polygons]), crs="EPSG:4326")
            except Exception as e:
                st.error(f"Error parsing KML file: {str(e)}")
        if not locations_df.empty and 'geometry' in locations_df.columns:
            centroid_lat = locations_df.geometry.centroid.y.mean()
            centroid_lon = locations_df.geometry.centroid.x.mean()
            m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
            for _, row in locations_df.iterrows():
                polygon = row['geometry']
                if polygon.is_valid:
                    gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
                    m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
            st.write("Map of Uploaded Polygons:")
            m.to_streamlit()
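        # Note: .centroid on geometries in the geographic CRS EPSG:4326 triggers
        # a GeoPandas warning and is only approximate; that is acceptable here
        # because the centroids are used solely to center the preview map.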

if st.button(f"Calculate {custom_formula}"):
    if not locations_df.empty:
        with st.spinner("Processing Data..."):
            try:
                results, processing_time = process_aggregation(
                    locations_df,
                    start_date_str,
                    end_date_str,
                    dataset_id,
                    selected_bands,
                    reducer_choice,
                    shape_type,
                    aggregation_period,
                    original_lat_col,
                    original_lon_col,
                    custom_formula,
                    kernel_size,
                    include_boundary,
                    tile_cloud_threshold=tile_cloud_threshold if "tile_cloud_threshold" in locals() else 0,
                    pixel_cloud_threshold=pixel_cloud_threshold if "pixel_cloud_threshold" in locals() else 0,
                    user_scale=user_scale
                )
                if results:
                    result_df = pd.DataFrame(results)
                    st.write(f"Processed Results Table ({aggregation_period}) for Formula: {custom_formula}")
                    st.dataframe(result_df)
                    # Replace '/' so Earth Engine dataset IDs produce a valid file name.
                    safe_dataset_id = dataset_id.replace('/', '_')
                    filename = f"{main_selection}_{safe_dataset_id}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{aggregation_period.lower()}.csv"
                    st.download_button(
                        label="Download results as CSV",
                        data=result_df.to_csv(index=False).encode('utf-8'),
                        file_name=filename,
                        mime='text/csv'
                    )
                    st.success(f"Processing complete! Total processing time: {processing_time:.2f} seconds.")
                    st.markdown("<h5>Graph Visualization</h5>", unsafe_allow_html=True)
                    if aggregation_period.lower() == 'custom (start date to end date)':
                        x_column = 'Date Range'
                    elif 'Date' in result_df.columns:
                        x_column = 'Date'
                    elif 'Week' in result_df.columns:
                        x_column = 'Week'
                    elif 'Month' in result_df.columns:
                        x_column = 'Month'
                    elif 'Year' in result_df.columns:
                        x_column = 'Year'
                    else:
                        st.warning("No valid time column found for plotting.")
                        st.stop()
                    # The custom aggregation path renames 'Calculated Value' to
                    # 'Aggregated Value', so pick whichever column actually exists.
                    y_column = 'Aggregated Value' if 'Aggregated Value' in result_df.columns else 'Calculated Value'
                    fig = px.line(
                        result_df,
                        x=x_column,
                        y=y_column,
                        color='Location Name',
                        title=f"{custom_formula} Over Time"
                    )
                    st.plotly_chart(fig)
                else:
                    st.warning("No results were generated. Check your inputs or formula.")
                    st.info(f"Total processing time: {processing_time:.2f} seconds.")
            except Exception as e:
                st.error(f"An error occurred during processing: {str(e)}")
    else:
        st.warning("Please upload a valid file to proceed.")
 
1
+ import streamlit as st
2
+ import json
3
+ import ee
4
+ import os
5
+ import pandas as pd
6
+ import geopandas as gpd
7
+ from datetime import datetime
8
+ import leafmap.foliumap as leafmap
9
+ import re
10
+ from shapely.geometry import base
11
+ from xml.etree import ElementTree as XET
12
+ from concurrent.futures import ThreadPoolExecutor, as_completed
13
+ import time
14
+ import matplotlib.pyplot as plt
15
+ import plotly.express as px
16
+
17
+ # Set up the page layout
18
+ st.set_page_config(layout="wide")
19
+
20
+ # Custom button styling
21
+ m = st.markdown(
22
+ """
23
+ <style>
24
+ div.stButton > button:first-child {
25
+ background-color: #006400;
26
+ color:#ffffff;
27
+ }
28
+ </style>""",
29
+ unsafe_allow_html=True,
30
+ )
31
+
32
+ # Logo and Title
33
+ st.write(
34
+ f"""
35
+ <div style="display: flex; justify-content: space-between; align-items: center;">
36
+ <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
37
+ <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
38
+ </div>
39
+ """,
40
+ unsafe_allow_html=True,
41
+ )
42
+ st.markdown(
43
+ f"""
44
+ <div style="display: flex; flex-direction: column; align-items: center;">
45
+ <img src="https://huggingface.co/spaces/YashMK89/SATRANG/resolve/main/SATRANG.png" style="width: 30%;">
46
+ <h3 style="text-align: center; margin: 0;">( Spatial and Temporal Aggregation for Remote-sensing Analysis of GEE Data )</h3>
47
+ </div>
48
+ <hr>
49
+ """,
50
+ unsafe_allow_html=True,
51
+ )
52
+
53
+ # Authenticate and initialize Earth Engine
54
+ earthengine_credentials = os.environ.get("EE_Authentication")
55
+ os.makedirs(os.path.expanduser("~/.config/earthengine/"), exist_ok=True)
56
+ with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
57
+ f.write(earthengine_credentials)
58
+ ee.Initialize(project='ee-yashsacisro24')
59
+
60
+ # Helper function to get reducer
61
+ def get_reducer(reducer_name):
62
+ reducers = {
63
+ 'mean': ee.Reducer.mean(),
64
+ 'sum': ee.Reducer.sum(),
65
+ 'median': ee.Reducer.median(),
66
+ 'min': ee.Reducer.min(),
67
+ 'max': ee.Reducer.max(),
68
+ 'count': ee.Reducer.count(),
69
+ }
70
+ return reducers.get(reducer_name.lower(), ee.Reducer.mean())
71
+
72
+ # Function to convert geometry to Earth Engine format
73
+ def convert_to_ee_geometry(geometry):
74
+ if isinstance(geometry, base.BaseGeometry):
75
+ if geometry.is_valid:
76
+ geojson = geometry.__geo_interface__
77
+ return ee.Geometry(geojson)
78
+ else:
79
+ raise ValueError("Invalid geometry: The polygon geometry is not valid.")
80
+ elif isinstance(geometry, dict) or isinstance(geometry, str):
81
+ try:
82
+ if isinstance(geometry, str):
83
+ geometry = json.loads(geometry)
84
+ if 'type' in geometry and 'coordinates' in geometry:
85
+ return ee.Geometry(geometry)
86
+ else:
87
+ raise ValueError("GeoJSON format is invalid.")
88
+ except Exception as e:
89
+ raise ValueError(f"Error parsing GeoJSON: {e}")
90
+ elif isinstance(geometry, str) and geometry.lower().endswith(".kml"):
91
+ try:
92
+ tree = XET.parse(geometry)
93
+ kml_root = tree.getroot()
94
+ kml_namespace = {'kml': 'http://www.opengis.net/kml/2.2'}
95
+ coordinates = kml_root.findall(".//kml:coordinates", kml_namespace)
96
+ if coordinates:
97
+ coords_text = coordinates[0].text.strip()
98
+ coords = coords_text.split()
99
+ coords = [tuple(map(float, coord.split(','))) for coord in coords]
100
+ geojson = {"type": "Polygon", "coordinates": [coords]}
101
+ return ee.Geometry(geojson)
102
+ else:
103
+ raise ValueError("KML does not contain valid coordinates.")
104
+ except Exception as e:
105
+ raise ValueError(f"Error parsing KML: {e}")
106
+ else:
107
+ raise ValueError("Unsupported geometry input type. Supported types are Shapely, GeoJSON, and KML.")
108
+
109
+ # Function to calculate custom formula
110
+ def calculate_custom_formula(image, geometry, selected_bands, custom_formula, reducer_choice, dataset_id, user_scale=None):
111
+ try:
112
+ # Determine the scale: Use user-defined scale if provided, otherwise use dataset's native resolution
113
+ default_scale = ee.ImageCollection(dataset_id).first().select(0).projection().nominalScale().getInfo()
114
+ scale = user_scale if user_scale is not None else default_scale
115
+ band_values = {}
116
+ band_names = image.bandNames().getInfo()
117
+ for band in selected_bands:
118
+ if band not in band_names:
119
+ raise ValueError(f"Band '{band}' not found in the dataset.")
120
+ band_values[band] = image.select(band)
121
+ reducer = get_reducer(reducer_choice)
122
+ reduced_values = {}
123
+ for band in selected_bands:
124
+ value = band_values[band].reduceRegion(
125
+ reducer=reducer,
126
+ geometry=geometry,
127
+ scale=scale
128
+ ).get(band).getInfo()
129
+ reduced_values[band] = float(value if value is not None else 0)
130
+ formula = custom_formula
131
+ for band in selected_bands:
132
+ formula = formula.replace(band, str(reduced_values[band]))
133
+ result = eval(formula, {"__builtins__": {}}, reduced_values)
134
+ if not isinstance(result, (int, float)):
135
+ raise ValueError("Formula did not result in a numeric value.")
136
+ return ee.Image.constant(result).rename('custom_result')
137
+ except ZeroDivisionError:
138
+ st.error("Error: Division by zero in the formula.")
139
+ return ee.Image(0).rename('custom_result').set('error', 'Division by zero')
140
+ except SyntaxError:
141
+ st.error(f"Error: Invalid syntax in formula '{custom_formula}'.")
142
+ return ee.Image(0).rename('custom_result').set('error', 'Invalid syntax')
143
+ except ValueError as e:
144
+ st.error(f"Error: {str(e)}")
145
+ return ee.Image(0).rename('custom_result').set('error', str(e))
146
+ except Exception as e:
147
+ st.error(f"Unexpected error: {e}")
148
+ return ee.Image(0).rename('custom_result').set('error', str(e))
149
+
150
+ # Aggregation functions
151
+ def aggregate_data_custom(collection):
152
+ collection = collection.map(lambda image: image.set('day', ee.Date(image.get('system:time_start')).format('YYYY-MM-dd')))
153
+ grouped_by_day = collection.aggregate_array('day').distinct()
154
+ def calculate_daily_mean(day):
155
+ daily_collection = collection.filter(ee.Filter.eq('day', day))
156
+ daily_mean = daily_collection.mean()
157
+ return daily_mean.set('day', day)
158
+ daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
159
+ return ee.ImageCollection(daily_images)
160
+
161
+ def aggregate_data_daily(collection):
162
+ def set_day_start(image):
163
+ date = ee.Date(image.get('system:time_start'))
164
+ day_start = date.format('YYYY-MM-dd')
165
+ return image.set('day_start', day_start)
166
+ collection = collection.map(set_day_start)
167
+ grouped_by_day = collection.aggregate_array('day_start').distinct()
168
+ def calculate_daily_mean(day_start):
169
+ daily_collection = collection.filter(ee.Filter.eq('day_start', day_start))
170
+ daily_mean = daily_collection.mean()
171
+ return daily_mean.set('day_start', day_start)
172
+ daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
173
+ return ee.ImageCollection(daily_images)
174
+
175
+ def aggregate_data_weekly(collection, start_date_str, end_date_str):
176
+ start_date = ee.Date(start_date_str)
177
+ end_date = ee.Date(end_date_str)
178
+ days_diff = end_date.difference(start_date, 'day')
179
+ num_weeks = days_diff.divide(7).ceil().getInfo()
180
+ weekly_images = []
181
+ for week in range(num_weeks):
182
+ week_start = start_date.advance(week * 7, 'day')
183
+ week_end = week_start.advance(7, 'day')
184
+ weekly_collection = collection.filterDate(week_start, week_end)
185
+ if weekly_collection.size().getInfo() > 0:
186
+ weekly_mean = weekly_collection.mean()
187
+ weekly_mean = weekly_mean.set('week_start', week_start.format('YYYY-MM-dd'))
188
+ weekly_images.append(weekly_mean)
189
+ return ee.ImageCollection.fromImages(weekly_images)
190
+
191
+ def aggregate_data_monthly(collection, start_date, end_date):
192
+ collection = collection.filterDate(start_date, end_date)
193
+ collection = collection.map(lambda image: image.set('month', ee.Date(image.get('system:time_start')).format('YYYY-MM')))
194
+ grouped_by_month = collection.aggregate_array('month').distinct()
195
+ def calculate_monthly_mean(month):
196
+ monthly_collection = collection.filter(ee.Filter.eq('month', month))
197
+ monthly_mean = monthly_collection.mean()
198
+ return monthly_mean.set('month', month)
199
+ monthly_images = ee.List(grouped_by_month.map(calculate_monthly_mean))
200
+ return ee.ImageCollection(monthly_images)
201
+
202
+ def aggregate_data_yearly(collection):
203
+ collection = collection.map(lambda image: image.set('year', ee.Date(image.get('system:time_start')).format('YYYY')))
204
+ grouped_by_year = collection.aggregate_array('year').distinct()
205
+ def calculate_yearly_mean(year):
206
+ yearly_collection = collection.filter(ee.Filter.eq('year', year))
207
+ yearly_mean = yearly_collection.mean()
208
+ return yearly_mean.set('year', year)
209
+ yearly_images = ee.List(grouped_by_year.map(calculate_yearly_mean))
210
+ return ee.ImageCollection(yearly_images)
211
+
212
+ def preprocess_collection(collection, tile_cloud_threshold, pixel_cloud_threshold):
213
+ def filter_tile(image):
214
+ cloud_percentage = calculate_cloud_percentage(image, cloud_band='QA60')
215
+ return image.set('cloud_percentage', cloud_percentage).updateMask(cloud_percentage.lt(tile_cloud_threshold))
216
+ def mask_cloudy_pixels(image):
217
+ qa60 = image.select('QA60')
218
+ opaque_clouds = qa60.bitwiseAnd(1 << 10)
219
+ cirrus_clouds = qa60.bitwiseAnd(1 << 11)
220
+ cloud_mask = opaque_clouds.Or(cirrus_clouds)
221
+ clear_pixels = cloud_mask.Not()
222
+ return image.updateMask(clear_pixels)
223
+ filtered_collection = collection.map(filter_tile)
224
+ masked_collection = filtered_collection.map(mask_cloudy_pixels)
225
+ return masked_collection
226
+
227
+ def process_single_geometry(row, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula, original_lat_col, original_lon_col, kernel_size=None, include_boundary=None, user_scale=None):
228
+ if shape_type.lower() == "point":
229
+ latitude = row.get('latitude')
230
+ longitude = row.get('longitude')
231
+ if pd.isna(latitude) or pd.isna(longitude):
232
+ return None
233
+ location_name = row.get('name', f"Location_{row.name}")
234
+ if kernel_size == "3x3 Kernel":
235
+ buffer_size = 45
236
+ roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
237
+ elif kernel_size == "5x5 Kernel":
238
+ buffer_size = 75
239
+ roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
240
+ else:
241
+ roi = ee.Geometry.Point([longitude, latitude])
242
+ elif shape_type.lower() == "polygon":
243
+ polygon_geometry = row.get('geometry')
244
+ location_name = row.get('name', f"Polygon_{row.name}")
245
+ try:
246
+ roi = convert_to_ee_geometry(polygon_geometry)
247
+ if not include_boundary:
248
+ roi = roi.buffer(-30).bounds()
249
+ except ValueError:
250
+ return None
251
+ collection = ee.ImageCollection(dataset_id) \
252
+ .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
253
+ .filterBounds(roi)
254
+ if aggregation_period.lower() == 'custom (start date to end date)':
255
+ collection = aggregate_data_custom(collection)
256
+ elif aggregation_period.lower() == 'daily':
257
+ collection = aggregate_data_daily(collection)
258
+ elif aggregation_period.lower() == 'weekly':
259
+ collection = aggregate_data_weekly(collection, start_date_str, end_date_str)
260
+ elif aggregation_period.lower() == 'monthly':
261
+ collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
262
+ elif aggregation_period.lower() == 'yearly':
263
+ collection = aggregate_data_yearly(collection)
264
+ image_list = collection.toList(collection.size())
265
+ processed_weeks = set()
266
+ aggregated_results = []
267
+ for i in range(image_list.size().getInfo()):
268
+ image = ee.Image(image_list.get(i))
269
+ if aggregation_period.lower() == 'custom (start date to end date)':
270
+ timestamp = image.get('day')
271
+ period_label = 'Date'
272
+ date = ee.Date(timestamp).format('YYYY-MM-dd').getInfo()
273
+ elif aggregation_period.lower() == 'daily':
274
+ timestamp = image.get('day_start')
275
+ period_label = 'Date'
276
+ date = ee.String(timestamp).getInfo()
277
+ elif aggregation_period.lower() == 'weekly':
278
+ timestamp = image.get('week_start')
279
+ period_label = 'Week'
280
+ date = ee.String(timestamp).getInfo()
281
+ if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
282
+ pd.to_datetime(date) > pd.to_datetime(end_date_str) or
283
+ date in processed_weeks):
284
+ continue
285
+ processed_weeks.add(date)
286
+ elif aggregation_period.lower() == 'monthly':
287
+ timestamp = image.get('month')
288
+ period_label = 'Month'
289
+ date = ee.Date(timestamp).format('YYYY-MM').getInfo()
290
+ elif aggregation_period.lower() == 'yearly':
291
+ timestamp = image.get('year')
292
+ period_label = 'Year'
293
+ date = ee.Date(timestamp).format('YYYY').getInfo()
294
+ index_image = calculate_custom_formula(image, roi, selected_bands, custom_formula, reducer_choice, dataset_id, user_scale=user_scale)
295
+ try:
296
+ index_value = index_image.reduceRegion(
297
+ reducer=get_reducer(reducer_choice),
298
+ geometry=roi,
299
+ scale=user_scale
300
+ ).get('custom_result')
301
+ calculated_value = index_value.getInfo()
302
+ if isinstance(calculated_value, (int, float)):
303
+ result = {
304
+ 'Location Name': location_name,
305
+ period_label: date,
306
+ 'Start Date': start_date_str,
307
+ 'End Date': end_date_str,
308
+ 'Calculated Value': calculated_value
309
+ }
310
+ if shape_type.lower() == 'point':
311
+ result[original_lat_col] = latitude
312
+ result[original_lon_col] = longitude
313
+ aggregated_results.append(result)
314
+ except Exception as e:
315
+ st.error(f"Error retrieving value for {location_name}: {e}")
316
+ return aggregated_results
317
+
318
+ def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, original_lat_col, original_lon_col, custom_formula="", kernel_size=None, include_boundary=None, tile_cloud_threshold=0, pixel_cloud_threshold=0, user_scale=None):
319
+ aggregated_results = []
320
+ total_steps = len(locations_df)
321
+ progress_bar = st.progress(0)
322
+ progress_text = st.empty()
323
+ start_time = time.time()
324
+ raw_collection = ee.ImageCollection(dataset_id) \
325
+ .filterDate(ee.Date(start_date_str), ee.Date(end_date_str))
326
+ st.write(f"Original Collection Size: {raw_collection.size().getInfo()}")
327
+ if tile_cloud_threshold > 0 or pixel_cloud_threshold > 0:
328
+ raw_collection = preprocess_collection(raw_collection, tile_cloud_threshold, pixel_cloud_threshold)
329
+ st.write(f"Preprocessed Collection Size: {raw_collection.size().getInfo()}")
330
+ with ThreadPoolExecutor(max_workers=10) as executor:
331
+ futures = []
332
+ for idx, row in locations_df.iterrows():
333
+ future = executor.submit(
334
+ process_single_geometry,
335
+ row,
336
+ start_date_str,
337
+ end_date_str,
338
+ dataset_id,
339
+ selected_bands,
340
+ reducer_choice,
341
+ shape_type,
342
+ aggregation_period,
343
+ custom_formula,
344
+ original_lat_col,
345
+ original_lon_col,
346
+ kernel_size,
347
+ include_boundary,
348
+ user_scale=user_scale
349
+ )
350
+ futures.append(future)
351
+ completed = 0
352
+ for future in as_completed(futures):
353
+ result = future.result()
354
+ if result:
355
+ aggregated_results.extend(result)
356
+ completed += 1
357
+ progress_percentage = completed / total_steps
358
+ progress_bar.progress(progress_percentage)
359
+ progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
360
+ end_time = time.time()
361
+ processing_time = end_time - start_time
362
+ if aggregated_results:
363
+ result_df = pd.DataFrame(aggregated_results)
364
+ if aggregation_period.lower() == 'custom (start date to end date)':
365
+ agg_dict = {
366
+ 'Start Date': 'first',
367
+ 'End Date': 'first',
368
+ 'Calculated Value': 'mean'
369
+ }
370
+ if shape_type.lower() == 'point':
371
+ agg_dict[original_lat_col] = 'first'
372
+ agg_dict[original_lon_col] = 'first'
373
+ aggregated_output = result_df.groupby('Location Name').agg(agg_dict).reset_index()
374
+ aggregated_output['Date Range'] = aggregated_output['Start Date'] + " to " + aggregated_output['End Date']
375
+ aggregated_output.rename(columns={'Calculated Value': 'Aggregated Value'}, inplace=True)
376
+ return aggregated_output.to_dict(orient='records'), processing_time
377
+ else:
378
+ return result_df.to_dict(orient='records'), processing_time
379
+ return [], processing_time
380
+
381
+ # Streamlit App Logic
382
+ st.markdown("<h5>Image Collection</h5>", unsafe_allow_html=True)
383
+ imagery_base = st.selectbox("Select Imagery Base", ["Sentinel", "Landsat", "MODIS", "VIIRS", "Custom Input"], index=0)
384
+ data = {}
385
+ if imagery_base == "Sentinel":
386
+ dataset_file = "sentinel_datasets.json"
387
+ try:
388
+ with open(dataset_file) as f:
389
+ data = json.load(f)
390
+ except FileNotFoundError:
391
+ st.error(f"Dataset file '{dataset_file}' not found.")
392
+ data = {}
393
+ elif imagery_base == "Landsat":
394
+ dataset_file = "landsat_datasets.json"
395
+ try:
396
+ with open(dataset_file) as f:
397
+ data = json.load(f)
398
+ except FileNotFoundError:
399
+ st.error(f"Dataset file '{dataset_file}' not found.")
400
+ data = {}
401
+ elif imagery_base == "MODIS":
402
+ dataset_file = "modis_datasets.json"
403
+ try:
404
+ with open(dataset_file) as f:
405
+ data = json.load(f)
406
+ except FileNotFoundError:
407
+ st.error(f"Dataset file '{dataset_file}' not found.")
408
+ data = {}
409
+ elif imagery_base == "VIIRS":
410
+ dataset_file = "viirs_datasets.json"
411
+ try:
412
+ with open(dataset_file) as f:
413
+ data = json.load(f)
414
+ except FileNotFoundError:
415
+ st.error(f"Dataset file '{dataset_file}' not found.")
416
+ data = {}
417
+ elif imagery_base == "Custom Input":
418
+ custom_dataset_id = st.text_input("Enter Custom Earth Engine Dataset ID (e.g., AHN/AHN4)", value="")
419
+ if custom_dataset_id:
420
+ try:
421
+ if custom_dataset_id.startswith("ee.ImageCollection("):
422
+ custom_dataset_id = custom_dataset_id.replace("ee.ImageCollection('", "").replace("')", "")
423
+ collection = ee.ImageCollection(custom_dataset_id)
424
+ band_names = collection.first().bandNames().getInfo()
425
+ data = {
426
+ f"Custom Dataset: {custom_dataset_id}": {
427
+ "sub_options": {custom_dataset_id: f"Custom Dataset ({custom_dataset_id})"},
428
+ "bands": {custom_dataset_id: band_names}
429
+ }
430
+ }
431
+ st.write(f"Fetched bands for {custom_dataset_id}: {', '.join(band_names)}")
432
+ except Exception as e:
433
+ st.error(f"Error fetching dataset: {str(e)}. Please check the dataset ID and ensure it's valid in Google Earth Engine.")
434
+ data = {}
435
+ else:
436
+ st.warning("Please enter a custom dataset ID to proceed.")
437
+ data = {}
438
+ if not data:
439
+ st.error("No valid dataset available. Please check your inputs.")
440
+ st.stop()
441
+
442
+ st.markdown("<hr><h5><b>{}</b></h5>".format(imagery_base), unsafe_allow_html=True)
443
+ main_selection = st.selectbox(f"Select {imagery_base} Dataset Category", list(data.keys()))
444
+ sub_selection = None
445
+ dataset_id = None
446
+ if main_selection:
447
+ sub_options = data[main_selection]["sub_options"]
448
+ sub_selection = st.selectbox(f"Select Specific {imagery_base} Dataset ID", list(sub_options.keys()))
449
+ if sub_selection:
450
+ st.write(f"You selected: {main_selection} -> {sub_options[sub_selection]}")
451
+ st.write(f"Dataset ID: {sub_selection}")
452
+ dataset_id = sub_selection
453
+
454
+ st.markdown("<hr><h5><b>Earth Engine Index Calculator</b></h5>", unsafe_allow_html=True)
455
+ if main_selection and sub_selection:
456
+ dataset_bands = data[main_selection]["bands"].get(sub_selection, [])
457
+ st.write(f"Available Bands for {sub_options[sub_selection]}: {', '.join(dataset_bands)}")
458
+ selected_bands = st.multiselect(
459
+ "Select 1 or 2 Bands for Calculation",
460
+ options=dataset_bands,
461
+ default=[dataset_bands[0]] if dataset_bands else [],
462
+ help=f"Select 1 or 2 bands from: {', '.join(dataset_bands)}"
463
+ )
464
+ if len(selected_bands) < 1:
465
+ st.warning("Please select at least one band.")
466
+ st.stop()
467
+ if selected_bands:
468
+ if len(selected_bands) == 1:
469
+ default_formula = f"{selected_bands[0]}"
470
+ example = f"'{selected_bands[0]} * 2' or '{selected_bands[0]} + 1'"
471
+ else:
472
+ default_formula = f"({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})"
473
+ example = f"'{selected_bands[0]} * {selected_bands[1]} / 2' or '({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})'"
474
+ custom_formula = st.text_input(
475
+ "Enter Custom Formula (e.g (B8 - B4) / (B8 + B4) , B4*B3/2)",
476
+ value=default_formula,
477
+ help=f"Use only these bands: {', '.join(selected_bands)}. Examples: {example}"
478
+ )
479
+ def validate_formula(formula, selected_bands):
480
+ allowed_chars = set(" +-*/()0123456789.")
481
+ terms = re.findall(r'[a-zA-Z][a-zA-Z0-9_]*', formula)
482
+ invalid_terms = [term for term in terms if term not in selected_bands]
483
+ if invalid_terms:
484
+ return False, f"Invalid terms in formula: {', '.join(invalid_terms)}. Use only {', '.join(selected_bands)}."
485
+ if not all(char in allowed_chars or char in ''.join(selected_bands) for char in formula):
486
+ return False, "Formula contains invalid characters. Use only bands, numbers, and operators (+, -, *, /, ())"
487
+ return True, ""
488
+ is_valid, error_message = validate_formula(custom_formula, selected_bands)
489
+ if not is_valid:
490
+ st.error(error_message)
491
+ st.stop()
492
+ elif not custom_formula:
493
+ st.warning("Please enter a custom formula to proceed.")
494
+ st.stop()
495
+ st.write(f"Custom Formula: {custom_formula}")
496
+
497
+ reducer_choice = st.selectbox(
498
+ "Select Reducer (e.g, mean , sum , median , min , max , count)",
499
+ ['mean', 'sum', 'median', 'min', 'max', 'count'],
500
+ index=0
501
+ )
502
+ start_date = st.date_input("Start Date", value=pd.to_datetime('2024-11-01'))
503
+ end_date = st.date_input("End Date", value=pd.to_datetime('2024-12-01'))
504
+ start_date_str = start_date.strftime('%Y-%m-%d')
505
+ end_date_str = end_date.strftime('%Y-%m-%d')
506
+
507
+ if imagery_base == "Sentinel" and "Sentinel-2" in sub_options[sub_selection]:
508
+ st.markdown("<h5>Cloud Filtering</h5>", unsafe_allow_html=True)
509
+ tile_cloud_threshold = st.slider(
510
+ "Select Maximum Tile-Based Cloud Coverage Threshold (%)",
511
+ min_value=0,
512
+ max_value=100,
513
+ value=20,
514
+ step=5,
515
+ help="Tiles with cloud coverage exceeding this threshold will be excluded."
516
+ )
517
+ pixel_cloud_threshold = st.slider(
518
+ "Select Maximum Pixel-Based Cloud Coverage Threshold (%)",
519
+ min_value=0,
520
+ max_value=100,
521
+ value=10,
522
+ step=5,
523
+ help="Individual pixels with cloud coverage exceeding this threshold will be masked."
524
+ )
+
+ aggregation_period = st.selectbox(
+     "Select Aggregation Period (e.g., Custom (Start Date to End Date), Daily, Weekly, Monthly, Yearly)",
+     ["Custom (Start Date to End Date)", "Daily", "Weekly", "Monthly", "Yearly"],
+     index=0
+ )
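+ # Points can be sampled at the exact location or averaged over a surrounding
+ # kernel; polygons are reduced over the pixels they contain, optionally
+ # excluding boundary pixels.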
+ shape_type = st.selectbox("Do you want to process 'Point' or 'Polygon' data?", ["Point", "Polygon"])
+ kernel_size = None
+ include_boundary = None
+ if shape_type.lower() == "point":
+     kernel_size = st.selectbox(
+         "Select Calculation Area (e.g., Point, 3x3 Kernel, 5x5 Kernel)",
+         ["Point", "3x3 Kernel", "5x5 Kernel"],
+         index=0,
+         help="Choose 'Point' for exact point calculation, or a kernel size for area averaging."
+     )
+ elif shape_type.lower() == "polygon":
+     include_boundary = st.checkbox(
+         "Include Boundary Pixels",
+         value=True,
+         help="Check to include pixels on the polygon boundary; uncheck to exclude them."
+     )
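+
+ # The default scale is the dataset's native resolution, read from the first
+ # image's projection via nominalScale(); users can override it if needed.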
+ st.markdown("<h5>Calculation Scale</h5>", unsafe_allow_html=True)
+ default_scale = ee.ImageCollection(dataset_id).first().select(0).projection().nominalScale().getInfo()
+ user_scale = st.number_input(
+     "Enter Calculation Scale (meters) [pre-filled with the dataset's default scale]",
+     min_value=1.0,
+     value=float(default_scale),
+     help=f"Default scale for this dataset is {default_scale} meters. Adjust if needed."
+ )
+
+ file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
+ locations_df = pd.DataFrame()
+ original_lat_col = None
+ original_lon_col = None
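+ # For point CSVs, let the user map arbitrary column names onto the
+ # 'latitude'/'longitude' fields the processing step expects.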
+ if file_upload is not None:
+     if shape_type.lower() == "point":
+         if file_upload.name.endswith('.csv'):
+             locations_df = pd.read_csv(file_upload)
+             st.write("Preview of your uploaded data (first 5 rows):")
+             st.dataframe(locations_df.head())
+             all_columns = locations_df.columns.tolist()
+             col1, col2 = st.columns(2)
+             with col1:
+                 original_lat_col = st.selectbox(
+                     "Select Latitude Column",
+                     options=all_columns,
+                     index=all_columns.index('latitude') if 'latitude' in all_columns else 0,
+                     help="Select the column containing latitude values"
+                 )
+             with col2:
+                 original_lon_col = st.selectbox(
+                     "Select Longitude Column",
+                     options=all_columns,
+                     index=all_columns.index('longitude') if 'longitude' in all_columns else 0,
+                     help="Select the column containing longitude values"
+                 )
+             if not pd.api.types.is_numeric_dtype(locations_df[original_lat_col]) or not pd.api.types.is_numeric_dtype(locations_df[original_lon_col]):
+                 st.error("Error: Selected Latitude and Longitude columns must contain numeric values.")
+                 st.stop()
+             locations_df = locations_df.rename(columns={
+                 original_lat_col: 'latitude',
+                 original_lon_col: 'longitude'
+             })
+         elif file_upload.name.endswith('.geojson'):
+             locations_df = gpd.read_file(file_upload)
+             if 'geometry' in locations_df.columns:
+                 locations_df['latitude'] = locations_df['geometry'].y
+                 locations_df['longitude'] = locations_df['geometry'].x
+                 original_lat_col = 'latitude'
+                 original_lon_col = 'longitude'
+             else:
+                 st.error("GeoJSON file doesn't contain a geometry column.")
+                 st.stop()
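+         # KML lists coordinates as "lon,lat[,alt]", so longitude is read
+         # first when the coordinate text is split below.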
+         elif file_upload.name.endswith('.kml'):
+             kml_string = file_upload.read().decode('utf-8')
+             try:
+                 root = XET.fromstring(kml_string)
+                 ns = {'kml': 'http://www.opengis.net/kml/2.2'}
+                 points = []
+                 for placemark in root.findall('.//kml:Placemark', ns):
+                     name = placemark.findtext('kml:name', default=f"Point_{len(points)}", namespaces=ns)
+                     coords_elem = placemark.find('.//kml:Point/kml:coordinates', ns)
+                     if coords_elem is not None:
+                         coords_text = coords_elem.text.strip()
+                         coords = [c.strip() for c in coords_text.split(',')]
+                         if len(coords) >= 2:
+                             lon, lat = float(coords[0]), float(coords[1])
+                             points.append({'name': name, 'geometry': f"POINT ({lon} {lat})"})
+                 if not points:
+                     st.error("No valid Point data found in the KML file.")
+                 else:
+                     locations_df = gpd.GeoDataFrame(points, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in points]), crs="EPSG:4326")
+                     locations_df['latitude'] = locations_df['geometry'].y
+                     locations_df['longitude'] = locations_df['geometry'].x
+                     original_lat_col = 'latitude'
+                     original_lon_col = 'longitude'
+             except Exception as e:
+                 st.error(f"Error parsing KML file: {str(e)}")
+         if not locations_df.empty and 'latitude' in locations_df.columns and 'longitude' in locations_df.columns:
+             m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
+             for _, row in locations_df.iterrows():
+                 latitude = row['latitude']
+                 longitude = row['longitude']
+                 if pd.isna(latitude) or pd.isna(longitude):
+                     continue
+                 m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
+             st.write("Map of Uploaded Points:")
+             m.to_streamlit()
+     elif shape_type.lower() == "polygon":
+         if file_upload.name.endswith('.csv'):
+             st.error("CSV upload not supported for polygons. Please upload a GeoJSON or KML file.")
+         elif file_upload.name.endswith('.geojson'):
+             locations_df = gpd.read_file(file_upload)
+             if 'geometry' not in locations_df.columns:
+                 st.error("GeoJSON file doesn't contain a geometry column.")
+                 st.stop()
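+         # A closed KML polygon ring carries at least four lon,lat pairs
+         # (first and last identical), hence the >= 4 check below.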
+         elif file_upload.name.endswith('.kml'):
+             kml_string = file_upload.read().decode('utf-8')
+             try:
+                 root = XET.fromstring(kml_string)
+                 ns = {'kml': 'http://www.opengis.net/kml/2.2'}
+                 polygons = []
+                 for placemark in root.findall('.//kml:Placemark', ns):
+                     name = placemark.findtext('kml:name', default=f"Polygon_{len(polygons)}", namespaces=ns)
+                     coords_elem = placemark.find('.//kml:Polygon//kml:coordinates', ns)
+                     if coords_elem is not None:
+                         coords_text = ' '.join(coords_elem.text.split())
+                         coord_pairs = [pair.split(',')[:2] for pair in coords_text.split() if pair]
+                         if len(coord_pairs) >= 4:
+                             coords_str = " ".join([f"{float(lon)} {float(lat)}" for lon, lat in coord_pairs])
+                             polygons.append({'name': name, 'geometry': f"POLYGON (({coords_str}))"})
+                 if not polygons:
+                     st.error("No valid Polygon data found in the KML file.")
+                 else:
+                     locations_df = gpd.GeoDataFrame(polygons, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in polygons]), crs="EPSG:4326")
+             except Exception as e:
+                 st.error(f"Error parsing KML file: {str(e)}")
+         if not locations_df.empty and 'geometry' in locations_df.columns:
+             centroid_lat = locations_df.geometry.centroid.y.mean()
+             centroid_lon = locations_df.geometry.centroid.x.mean()
+             m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
+             for _, row in locations_df.iterrows():
+                 polygon = row['geometry']
+                 if polygon.is_valid:
+                     gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
+                     m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
+             st.write("Map of Uploaded Polygons:")
+             m.to_streamlit()
+
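+ # The Sentinel-2 cloud thresholds are only defined when that imagery was
+ # selected, so the locals() checks below fall back to 0 for other datasets.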
+ if st.button(f"Calculate {custom_formula}"):
+     if not locations_df.empty:
+         with st.spinner("Processing Data..."):
+             try:
+                 results, processing_time = process_aggregation(
+                     locations_df,
+                     start_date_str,
+                     end_date_str,
+                     dataset_id,
+                     selected_bands,
+                     reducer_choice,
+                     shape_type,
+                     aggregation_period,
+                     original_lat_col,
+                     original_lon_col,
+                     custom_formula,
+                     kernel_size,
+                     include_boundary,
+                     tile_cloud_threshold=tile_cloud_threshold if "tile_cloud_threshold" in locals() else 0,
+                     pixel_cloud_threshold=pixel_cloud_threshold if "pixel_cloud_threshold" in locals() else 0,
+                     user_scale=user_scale
+                 )
+                 if results:
+                     result_df = pd.DataFrame(results)
+                     st.write(f"Processed Results Table ({aggregation_period}) for Formula: {custom_formula}")
+                     st.dataframe(result_df)
+                     filename = f"{main_selection}_{dataset_id}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{aggregation_period.lower()}.csv"
+                     st.download_button(
+                         label="Download results as CSV",
+                         data=result_df.to_csv(index=False).encode('utf-8'),
+                         file_name=filename,
+                         mime='text/csv'
+                     )
+                     st.success(f"Processing complete! Total processing time: {processing_time:.2f} seconds.")
+                     st.markdown("<h5>Graph Visualization</h5>", unsafe_allow_html=True)
+                     if aggregation_period.lower() == 'custom (start date to end date)':
+                         x_column = 'Date Range'
+                     elif 'Date' in result_df.columns:
+                         x_column = 'Date'
+                     elif 'Week' in result_df.columns:
+                         x_column = 'Week'
+                     elif 'Month' in result_df.columns:
+                         x_column = 'Month'
+                     elif 'Year' in result_df.columns:
+                         x_column = 'Year'
+                     else:
+                         st.warning("No valid time column found for plotting.")
+                         st.stop()
+                     y_column = 'Calculated Value'
+                     fig = px.line(
+                         result_df,
+                         x=x_column,
+                         y=y_column,
+                         color='Location Name',
+                         title=f"{custom_formula} Over Time"
+                     )
+                     st.plotly_chart(fig)
+                 else:
+                     st.warning("No results were generated. Check your inputs or formula.")
+                     st.info(f"Total processing time: {processing_time:.2f} seconds.")
+             except Exception as e:
+                 st.error(f"An error occurred during processing: {str(e)}")
+     else:
+         st.warning("Please upload a valid file to proceed.")