YashMK89 committed on
Commit 38b854e · verified · 1 Parent(s): fe33fed

update app.py

Files changed (1)
  1. app.py +1048 -67
app.py CHANGED
@@ -1,3 +1,783 @@
1
  import streamlit as st
2
  import json
3
  import ee
@@ -8,8 +788,10 @@ from datetime import datetime
8
  import leafmap.foliumap as leafmap
9
  import re
10
  from shapely.geometry import base
11
- from lxml import etree
12
- from xml.etree import ElementTree as ET
 
 
13
 
14
  # Set up the page layout
15
  st.set_page_config(layout="wide")
@@ -40,12 +822,56 @@ st.write(
40
  # Title
41
  st.markdown(
42
  f"""
43
- <h1 style="text-align: center;">Precision Analysis for Vegetation, Water, and Air Quality</h1>
44
  """,
45
  unsafe_allow_html=True,
46
  )
47
- st.write("<h2><div style='text-align: center;'>User Inputs</div></h2>", unsafe_allow_html=True)
49
  # Authenticate and initialize Earth Engine
50
  earthengine_credentials = os.environ.get("EE_Authentication")
51
 
@@ -56,6 +882,8 @@ with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
56
 
57
  ee.Initialize(project='ee-yashsacisro24')
58
 
 
 
59
  # Imagery base selection
60
  imagery_base = st.selectbox("Select Imagery Base", ["Sentinel", "Landsat", "MODIS", "Custom Input"], index=0)
61
 
@@ -97,8 +925,14 @@ elif imagery_base == "Custom Input":
97
  data = {}
98
 
99
  # Display the title for the Streamlit app
100
- st.title(f"{imagery_base} Dataset")
101
-
102
  # Select dataset category (main selection)
103
  if data:
104
  main_selection = st.selectbox(f"Select {imagery_base} Dataset Category", list(data.keys()))
@@ -114,46 +948,22 @@ if main_selection:
114
  sub_options = data[main_selection]["sub_options"]
115
  sub_selection = st.selectbox(f"Select Specific {imagery_base} Dataset ID", list(sub_options.keys()))
116
 
117
- # Display the selected dataset ID and its availability based on user input
118
  if sub_selection:
119
  st.write(f"You selected: {main_selection} -> {sub_options[sub_selection]}")
120
  st.write(f"Dataset ID: {sub_selection}")
121
  dataset_id = sub_selection # Use the key directly as the dataset ID
122
 
123
- # Fetch and display dataset availability in green text
124
- try:
125
- # Create an Earth Engine ImageCollection object for the selected dataset
126
- collection = ee.ImageCollection(dataset_id)
127
-
128
- # Get the date range of the collection
129
- range_info = collection.reduceColumns(
130
- reducer=ee.Reducer.minMax(),
131
- selectors=['system:time_start']
132
- ).getInfo()
133
-
134
- # Extract min and max timestamps (in milliseconds) and convert to readable dates
135
- min_time = range_info.get('min', None)
136
- max_time = range_info.get('max', None)
137
-
138
- if min_time and max_time:
139
- start_date = datetime.fromtimestamp(min_time / 1000).strftime('%Y-%m-%d')
140
- end_date = datetime.fromtimestamp(max_time / 1000).strftime('%Y-%m-%d')
141
- st.markdown(
142
- f"<strong>Dataset Availability:</strong> From <span style='color: #fc0101;'>{start_date}</span> to <span style='color: #fc0101;'>{end_date}</span>",
143
- unsafe_allow_html=True
144
- )
145
- else:
146
- st.markdown(
147
- f"<span style='color: #fc0101;'><strong>Dataset Availability:</strong> Date range not available.</span>",
148
- unsafe_allow_html=True
149
- )
150
-
151
- except Exception as e:
152
- st.error(f"Error fetching dataset availability: {str(e)}")
153
-
154
  # Earth Engine Index Calculator Section
155
- st.header("Earth Engine Index Calculator")
156
157
  # Load band information based on selected dataset
158
  if main_selection and sub_selection:
159
  dataset_bands = data[main_selection]["bands"].get(sub_selection, [])
@@ -209,6 +1019,8 @@ if main_selection and sub_selection:
209
 
210
  # Display the validated formula
211
  st.write(f"Custom Formula: {custom_formula}")
 
 
212
 
213
  # Function to get the corresponding reducer based on user input
214
  def get_reducer(reducer_name):
@@ -229,43 +1041,85 @@ reducer_choice = st.selectbox(
229
  index=0 # Default to 'mean'
230
  )
231
232
  # Function to convert geometry to Earth Engine format
233
  def convert_to_ee_geometry(geometry):
 
234
  if isinstance(geometry, base.BaseGeometry):
235
  if geometry.is_valid:
236
  geojson = geometry.__geo_interface__
 
237
  return ee.Geometry(geojson)
238
  else:
239
  raise ValueError("Invalid geometry: The polygon geometry is not valid.")
240
- elif isinstance(geometry, dict) or isinstance(geometry, str):
241
- try:
242
- if isinstance(geometry, str):
243
- geometry = json.loads(geometry)
244
- if 'type' in geometry and 'coordinates' in geometry:
245
- return ee.Geometry(geometry)
246
- else:
247
- raise ValueError("GeoJSON format is invalid.")
248
- except Exception as e:
249
- raise ValueError(f"Error parsing GeoJSON: {e}")
250
- elif isinstance(geometry, str) and geometry.lower().endswith(".kml"):
251
  try:
252
- tree = ET.parse(geometry)
253
- kml_root = tree.getroot()
254
- kml_namespace = {'kml': 'http://www.opengis.net/kml/2.2'}
255
- coordinates = kml_root.findall(".//kml:coordinates", kml_namespace)
256
- if coordinates:
257
- coords_text = coordinates[0].text.strip()
258
- coords = coords_text.split()
259
- coords = [tuple(map(float, coord.split(','))) for coord in coords]
260
- geojson = {"type": "Polygon", "coordinates": [coords]}
261
- return ee.Geometry(geojson)
262
  else:
263
- raise ValueError("KML does not contain valid coordinates.")
264
- except Exception as e:
265
- raise ValueError(f"Error parsing KML: {e}")
266
  else:
267
- raise ValueError("Unsupported geometry input type. Supported types are Shapely, GeoJSON, and KML.")
268
-
269
  # Date Input for Start and End Dates
270
  start_date = st.date_input("Start Date", value=pd.to_datetime('2024-11-01'))
271
  end_date = st.date_input("End Date", value=pd.to_datetime('2024-12-01'))
@@ -301,6 +1155,85 @@ elif shape_type.lower() == "polygon":
301
  help="Check to include pixels on the polygon boundary; uncheck to exclude them."
302
  )
303
304
  # Ask user to upload a file based on shape type
305
  file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
306
 
@@ -312,7 +1245,31 @@ if file_upload is not None:
312
  elif file_upload.name.endswith('.geojson'):
313
  locations_df = gpd.read_file(file_upload)
314
  elif file_upload.name.endswith('.kml'):
315
- locations_df = gpd.read_file(file_upload)
316
  else:
317
  st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
318
  locations_df = pd.DataFrame()
@@ -350,7 +1307,30 @@ if file_upload is not None:
350
  elif file_upload.name.endswith('.geojson'):
351
  locations_df = gpd.read_file(file_upload)
352
  elif file_upload.name.endswith('.kml'):
353
- locations_df = gpd.read_file(file_upload)
354
  else:
355
  st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
356
  locations_df = pd.DataFrame()
@@ -379,6 +1359,7 @@ if file_upload is not None:
379
  m.to_streamlit()
380
  st.session_state.map_data = m
381
 
 
382
  # Initialize session state for storing results
383
  if 'results' not in st.session_state:
384
  st.session_state.results = []
 
1
+ # import streamlit as st
2
+ # import json
3
+ # import ee
4
+ # import os
5
+ # import pandas as pd
6
+ # import geopandas as gpd
7
+ # from datetime import datetime
8
+ # import leafmap.foliumap as leafmap
9
+ # import re
10
+ # from shapely.geometry import base
11
+ # from lxml import etree
12
+ # from xml.etree import ElementTree as ET
13
+
14
+ # # Set up the page layout
15
+ # st.set_page_config(layout="wide")
16
+
17
+ # # Custom button styling
18
+ # m = st.markdown(
19
+ # """
20
+ # <style>
21
+ # div.stButton > button:first-child {
22
+ # background-color: #006400;
23
+ # color:#ffffff;
24
+ # }
25
+ # </style>""",
26
+ # unsafe_allow_html=True,
27
+ # )
28
+
29
+ # # Logo
30
+ # st.write(
31
+ # f"""
32
+ # <div style="display: flex; justify-content: space-between; align-items: center;">
33
+ # <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
34
+ # <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
35
+ # </div>
36
+ # """,
37
+ # unsafe_allow_html=True,
38
+ # )
39
+
40
+ # # Title
41
+ # st.markdown(
42
+ # f"""
43
+ # <h1 style="text-align: center;">Precision Analysis for Vegetation, Water, and Air Quality</h1>
44
+ # """,
45
+ # unsafe_allow_html=True,
46
+ # )
47
+ # st.write("<h2><div style='text-align: center;'>User Inputs</div></h2>", unsafe_allow_html=True)
48
+
49
+ # # Authenticate and initialize Earth Engine
50
+ # earthengine_credentials = os.environ.get("EE_Authentication")
51
+
52
+ # # Initialize Earth Engine with secret credentials
53
+ # os.makedirs(os.path.expanduser("~/.config/earthengine/"), exist_ok=True)
54
+ # with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
55
+ # f.write(earthengine_credentials)
56
+
57
+ # ee.Initialize(project='ee-yashsacisro24')
58
+
59
+ # # Imagery base selection
60
+ # imagery_base = st.selectbox("Select Imagery Base", ["Sentinel", "Landsat", "MODIS", "Custom Input"], index=0)
61
+
62
+ # # Load the appropriate dataset based on imagery base
63
+ # if imagery_base == "Sentinel":
64
+ # dataset_file = "sentinel_datasets.json"
65
+ # with open(dataset_file) as f:
66
+ # data = json.load(f)
67
+ # elif imagery_base == "Landsat":
68
+ # dataset_file = "landsat_datasets.json"
69
+ # with open(dataset_file) as f:
70
+ # data = json.load(f)
71
+ # elif imagery_base == "MODIS":
72
+ # dataset_file = "modis_datasets.json"
73
+ # with open(dataset_file) as f:
74
+ # data = json.load(f)
75
+ # elif imagery_base == "Custom Input":
76
+ # custom_dataset_id = st.text_input("Enter Custom Earth Engine Dataset ID (e.g., ee.ImageCollection('AHN/AHN4'))", value="")
77
+ # if custom_dataset_id:
78
+ # try:
79
+ # # Remove potential "ee.ImageCollection()" wrapper for simplicity
80
+ # if custom_dataset_id.startswith("ee.ImageCollection("):
81
+ # custom_dataset_id = custom_dataset_id.replace("ee.ImageCollection('", "").replace("')", "")
82
+ # # Fetch dataset info from GEE
83
+ # collection = ee.ImageCollection(custom_dataset_id)
84
+ # band_names = collection.first().bandNames().getInfo()
85
+ # data = {
86
+ # f"Custom Dataset: {custom_dataset_id}": {
87
+ # "sub_options": {custom_dataset_id: f"Custom Dataset ({custom_dataset_id})"},
88
+ # "bands": {custom_dataset_id: band_names}
89
+ # }
90
+ # }
91
+ # st.write(f"Fetched bands for {custom_dataset_id}: {', '.join(band_names)}")
92
+ # except Exception as e:
93
+ # st.error(f"Error fetching dataset: {str(e)}. Please check the dataset ID and ensure it's valid in Google Earth Engine.")
94
+ # data = {}
95
+ # else:
96
+ # st.warning("Please enter a custom dataset ID to proceed.")
97
+ # data = {}
98
+
99
+ # # Display the title for the Streamlit app
100
+ # st.title(f"{imagery_base} Dataset")
101
+
102
+ # # Select dataset category (main selection)
103
+ # if data:
104
+ # main_selection = st.selectbox(f"Select {imagery_base} Dataset Category", list(data.keys()))
105
+ # else:
106
+ # main_selection = None
107
+
108
+ # # Initialize sub_selection and dataset_id as None
109
+ # sub_selection = None
110
+ # dataset_id = None
111
+
112
+ # # If a category is selected, display the sub-options (specific datasets)
113
+ # if main_selection:
114
+ # sub_options = data[main_selection]["sub_options"]
115
+ # sub_selection = st.selectbox(f"Select Specific {imagery_base} Dataset ID", list(sub_options.keys()))
116
+
117
+ # # Display the selected dataset ID and its availability based on user input
118
+ # if sub_selection:
119
+ # st.write(f"You selected: {main_selection} -> {sub_options[sub_selection]}")
120
+ # st.write(f"Dataset ID: {sub_selection}")
121
+ # dataset_id = sub_selection # Use the key directly as the dataset ID
122
+
123
+ # # Fetch and display dataset availability in green text
124
+ # try:
125
+ # # Create an Earth Engine ImageCollection object for the selected dataset
126
+ # collection = ee.ImageCollection(dataset_id)
127
+
128
+ # # Get the date range of the collection
129
+ # range_info = collection.reduceColumns(
130
+ # reducer=ee.Reducer.minMax(),
131
+ # selectors=['system:time_start']
132
+ # ).getInfo()
133
+
134
+ # # Extract min and max timestamps (in milliseconds) and convert to readable dates
135
+ # min_time = range_info.get('min', None)
136
+ # max_time = range_info.get('max', None)
137
+
138
+ # if min_time and max_time:
139
+ # start_date = datetime.fromtimestamp(min_time / 1000).strftime('%Y-%m-%d')
140
+ # end_date = datetime.fromtimestamp(max_time / 1000).strftime('%Y-%m-%d')
141
+ # st.markdown(
142
+ # f"<strong>Dataset Availability:</strong> From <span style='color: #fc0101;'>{start_date}</span> to <span style='color: #fc0101;'>{end_date}</span>",
143
+ # unsafe_allow_html=True
144
+ # )
145
+ # else:
146
+ # st.markdown(
147
+ # f"<span style='color: #fc0101;'><strong>Dataset Availability:</strong> Date range not available.</span>",
148
+ # unsafe_allow_html=True
149
+ # )
150
+
151
+ # except Exception as e:
152
+ # st.error(f"Error fetching dataset availability: {str(e)}")
153
+
154
+ # # Earth Engine Index Calculator Section
155
+ # st.header("Earth Engine Index Calculator")
156
+
157
+ # # Load band information based on selected dataset
158
+ # if main_selection and sub_selection:
159
+ # dataset_bands = data[main_selection]["bands"].get(sub_selection, [])
160
+ # st.write(f"Available Bands for {sub_options[sub_selection]}: {', '.join(dataset_bands)}")
161
+
162
+ # # Allow user to select 1 or 2 bands
163
+ # selected_bands = st.multiselect(
164
+ # "Select 1 or 2 Bands for Calculation",
165
+ # options=dataset_bands,
166
+ # default=[dataset_bands[0]] if dataset_bands else [],
167
+ # help=f"Select 1 or 2 bands from: {', '.join(dataset_bands)}"
168
+ # )
169
+
170
+ # # Ensure minimum 1 and maximum 2 bands are selected
171
+ # if len(selected_bands) < 1:
172
+ # st.warning("Please select at least one band.")
173
+ # st.stop()
174
+
175
+ # # Show custom formula input if bands are selected
176
+ # if selected_bands:
177
+ # # Provide a default formula based on the number of selected bands
178
+ # if len(selected_bands) == 1:
179
+ # default_formula = f"{selected_bands[0]}"
180
+ # example = f"'{selected_bands[0]} * 2' or '{selected_bands[0]} + 1'"
181
+ # else: # len(selected_bands) == 2
182
+ # default_formula = f"({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})"
183
+ # example = f"'{selected_bands[0]} * {selected_bands[1]} / 2' or '({selected_bands[0]} - {selected_bands[1]}) / ({selected_bands[0]} + {selected_bands[1]})'"
184
+
185
+ # custom_formula = st.text_input(
186
+ # "Enter Custom Formula (e.g (B8 - B4) / (B8 + B4) , B4*B3/2)",
187
+ # value=default_formula,
188
+ # help=f"Use only these bands: {', '.join(selected_bands)}. Examples: {example}"
189
+ # )
190
+
191
+ # # Validate the formula
192
+ # def validate_formula(formula, selected_bands):
193
+ # allowed_chars = set(" +-*/()0123456789.")
194
+ # terms = re.findall(r'[a-zA-Z][a-zA-Z0-9_]*', formula)
195
+ # invalid_terms = [term for term in terms if term not in selected_bands]
196
+ # if invalid_terms:
197
+ # return False, f"Invalid terms in formula: {', '.join(invalid_terms)}. Use only {', '.join(selected_bands)}."
198
+ # if not all(char in allowed_chars or char in ''.join(selected_bands) for char in formula):
199
+ # return False, "Formula contains invalid characters. Use only bands, numbers, and operators (+, -, *, /, ())"
200
+ # return True, ""
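+ # Example (hypothetical bands, for illustration): with selected_bands = ['B8', 'B4'],
+ # validate_formula("(B8 - B4) / (B8 + B4)", ['B8', 'B4']) returns (True, ""),
+ # while validate_formula("(B8 - B5) / 2", ['B8', 'B4']) returns False because
+ # the regex extracts 'B5', which is not among the selected bands.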
201
+
202
+ # is_valid, error_message = validate_formula(custom_formula, selected_bands)
203
+ # if not is_valid:
204
+ # st.error(error_message)
205
+ # st.stop()
206
+ # elif not custom_formula:
207
+ # st.warning("Please enter a custom formula to proceed.")
208
+ # st.stop()
209
+
210
+ # # Display the validated formula
211
+ # st.write(f"Custom Formula: {custom_formula}")
212
+
213
+ # # Function to get the corresponding reducer based on user input
214
+ # def get_reducer(reducer_name):
215
+ # reducers = {
216
+ # 'mean': ee.Reducer.mean(),
217
+ # 'sum': ee.Reducer.sum(),
218
+ # 'median': ee.Reducer.median(),
219
+ # 'min': ee.Reducer.min(),
220
+ # 'max': ee.Reducer.max(),
221
+ # 'count': ee.Reducer.count(),
222
+ # }
223
+ # return reducers.get(reducer_name.lower(), ee.Reducer.mean())
224
+
225
+ # # Streamlit selectbox for reducer choice
226
+ # reducer_choice = st.selectbox(
227
+ # "Select Reducer (e.g, mean , sum , median , min , max , count)",
228
+ # ['mean', 'sum', 'median', 'min', 'max', 'count'],
229
+ # index=0 # Default to 'mean'
230
+ # )
231
+
232
+ # # Function to convert geometry to Earth Engine format
233
+ # def convert_to_ee_geometry(geometry):
234
+ # if isinstance(geometry, base.BaseGeometry):
235
+ # if geometry.is_valid:
236
+ # geojson = geometry.__geo_interface__
237
+ # return ee.Geometry(geojson)
238
+ # else:
239
+ # raise ValueError("Invalid geometry: The polygon geometry is not valid.")
240
+ # elif isinstance(geometry, dict) or isinstance(geometry, str):
241
+ # try:
242
+ # if isinstance(geometry, str):
243
+ # geometry = json.loads(geometry)
244
+ # if 'type' in geometry and 'coordinates' in geometry:
245
+ # return ee.Geometry(geometry)
246
+ # else:
247
+ # raise ValueError("GeoJSON format is invalid.")
248
+ # except Exception as e:
249
+ # raise ValueError(f"Error parsing GeoJSON: {e}")
250
+ # elif isinstance(geometry, str) and geometry.lower().endswith(".kml"):
251
+ # try:
252
+ # tree = ET.parse(geometry)
253
+ # kml_root = tree.getroot()
254
+ # kml_namespace = {'kml': 'http://www.opengis.net/kml/2.2'}
255
+ # coordinates = kml_root.findall(".//kml:coordinates", kml_namespace)
256
+ # if coordinates:
257
+ # coords_text = coordinates[0].text.strip()
258
+ # coords = coords_text.split()
259
+ # coords = [tuple(map(float, coord.split(','))) for coord in coords]
260
+ # geojson = {"type": "Polygon", "coordinates": [coords]}
261
+ # return ee.Geometry(geojson)
262
+ # else:
263
+ # raise ValueError("KML does not contain valid coordinates.")
264
+ # except Exception as e:
265
+ # raise ValueError(f"Error parsing KML: {e}")
266
+ # else:
267
+ # raise ValueError("Unsupported geometry input type. Supported types are Shapely, GeoJSON, and KML.")
268
+
269
+ # # Date Input for Start and End Dates
270
+ # start_date = st.date_input("Start Date", value=pd.to_datetime('2024-11-01'))
271
+ # end_date = st.date_input("End Date", value=pd.to_datetime('2024-12-01'))
272
+
273
+ # # Convert start_date and end_date to string format for Earth Engine
274
+ # start_date_str = start_date.strftime('%Y-%m-%d')
275
+ # end_date_str = end_date.strftime('%Y-%m-%d')
276
+
277
+ # # Aggregation period selection
278
+ # aggregation_period = st.selectbox(
279
+ # "Select Aggregation Period (e.g, Custom(Start Date to End Date) , Weekly , Monthly , Yearly)",
280
+ # ["Custom (Start Date to End Date)", "Weekly", "Monthly", "Yearly"],
281
+ # index=0
282
+ # )
283
+
284
+ # # Ask user whether they want to process 'Point' or 'Polygon' data
285
+ # shape_type = st.selectbox("Do you want to process 'Point' or 'Polygon' data?", ["Point", "Polygon"])
286
+
287
+ # # Additional options based on shape type
288
+ # kernel_size = None
289
+ # include_boundary = None
290
+ # if shape_type.lower() == "point":
291
+ # kernel_size = st.selectbox(
292
+ # "Select Calculation Area(e.g, Point , 3x3 Kernel , 5x5 Kernel)",
293
+ # ["Point", "3x3 Kernel", "5x5 Kernel"],
294
+ # index=0,
295
+ # help="Choose 'Point' for exact point calculation, or a kernel size for area averaging."
296
+ # )
297
+ # elif shape_type.lower() == "polygon":
298
+ # include_boundary = st.checkbox(
299
+ # "Include Boundary Pixels",
300
+ # value=True,
301
+ # help="Check to include pixels on the polygon boundary; uncheck to exclude them."
302
+ # )
303
+
304
+ # # Ask user to upload a file based on shape type
305
+ # file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
306
+
307
+ # if file_upload is not None:
308
+ # # Read the user-uploaded file
309
+ # if shape_type.lower() == "point":
310
+ # if file_upload.name.endswith('.csv'):
311
+ # locations_df = pd.read_csv(file_upload)
312
+ # elif file_upload.name.endswith('.geojson'):
313
+ # locations_df = gpd.read_file(file_upload)
314
+ # elif file_upload.name.endswith('.kml'):
315
+ # locations_df = gpd.read_file(file_upload)
316
+ # else:
317
+ # st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
318
+ # locations_df = pd.DataFrame()
319
+
320
+ # if 'geometry' in locations_df.columns:
321
+ # if locations_df.geometry.geom_type.isin(['Polygon', 'MultiPolygon']).any():
322
+ # st.warning("The uploaded file contains polygon data. Please select 'Polygon' for processing.")
323
+ # st.stop()
324
+
325
+ # with st.spinner('Processing Map...'):
326
+ # if locations_df is not None and not locations_df.empty:
327
+ # if 'geometry' in locations_df.columns:
328
+ # locations_df['latitude'] = locations_df['geometry'].y
329
+ # locations_df['longitude'] = locations_df['geometry'].x
330
+
331
+ # if 'latitude' not in locations_df.columns or 'longitude' not in locations_df.columns:
332
+ # st.error("Uploaded file is missing required 'latitude' or 'longitude' columns.")
333
+ # else:
334
+ # st.write("Preview of the uploaded points data:")
335
+ # st.dataframe(locations_df.head())
336
+ # m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
337
+ # for _, row in locations_df.iterrows():
338
+ # latitude = row['latitude']
339
+ # longitude = row['longitude']
340
+ # if pd.isna(latitude) or pd.isna(longitude):
341
+ # continue
342
+ # m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
343
+ # st.write("Map of Uploaded Points:")
344
+ # m.to_streamlit()
345
+ # st.session_state.map_data = m
346
+
347
+ # elif shape_type.lower() == "polygon":
348
+ # if file_upload.name.endswith('.csv'):
349
+ # locations_df = pd.read_csv(file_upload)
350
+ # elif file_upload.name.endswith('.geojson'):
351
+ # locations_df = gpd.read_file(file_upload)
352
+ # elif file_upload.name.endswith('.kml'):
353
+ # locations_df = gpd.read_file(file_upload)
354
+ # else:
355
+ # st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
356
+ # locations_df = pd.DataFrame()
357
+
358
+ # if 'geometry' in locations_df.columns:
359
+ # if locations_df.geometry.geom_type.isin(['Point', 'MultiPoint']).any():
360
+ # st.warning("The uploaded file contains point data. Please select 'Point' for processing.")
361
+ # st.stop()
362
+
363
+ # with st.spinner('Processing Map...'):
364
+ # if locations_df is not None and not locations_df.empty:
365
+ # if 'geometry' not in locations_df.columns:
366
+ # st.error("Uploaded file is missing required 'geometry' column.")
367
+ # else:
368
+ # st.write("Preview of the uploaded polygons data:")
369
+ # st.dataframe(locations_df.head())
370
+ # centroid_lat = locations_df.geometry.centroid.y.mean()
371
+ # centroid_lon = locations_df.geometry.centroid.x.mean()
372
+ # m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
373
+ # for _, row in locations_df.iterrows():
374
+ # polygon = row['geometry']
375
+ # if polygon.is_valid:
376
+ # gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
377
+ # m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
378
+ # st.write("Map of Uploaded Polygons:")
379
+ # m.to_streamlit()
380
+ # st.session_state.map_data = m
381
+
382
+ # # Initialize session state for storing results
383
+ # if 'results' not in st.session_state:
384
+ # st.session_state.results = []
385
+ # if 'last_params' not in st.session_state:
386
+ # st.session_state.last_params = {}
387
+ # if 'map_data' not in st.session_state:
388
+ # st.session_state.map_data = None
389
+ # if 'show_example' not in st.session_state:
390
+ # st.session_state.show_example = True
391
+
392
+ # # Function to check if parameters have changed
393
+ # def parameters_changed():
394
+ # return (
395
+ # st.session_state.last_params.get('main_selection') != main_selection or
396
+ # st.session_state.last_params.get('dataset_id') != dataset_id or
397
+ # st.session_state.last_params.get('selected_bands') != selected_bands or
398
+ # st.session_state.last_params.get('custom_formula') != custom_formula or
399
+ # st.session_state.last_params.get('start_date_str') != start_date_str or
400
+ # st.session_state.last_params.get('end_date_str') != end_date_str or
401
+ # st.session_state.last_params.get('shape_type') != shape_type or
402
+ # st.session_state.last_params.get('file_upload') != file_upload or
403
+ # st.session_state.last_params.get('kernel_size') != kernel_size or
404
+ # st.session_state.last_params.get('include_boundary') != include_boundary
405
+ # )
406
+
407
+ # # If parameters have changed, reset the results
408
+ # if parameters_changed():
409
+ # st.session_state.results = []
410
+ # st.session_state.last_params = {
411
+ # 'main_selection': main_selection,
412
+ # 'dataset_id': dataset_id,
413
+ # 'selected_bands': selected_bands,
414
+ # 'custom_formula': custom_formula,
415
+ # 'start_date_str': start_date_str,
416
+ # 'end_date_str': end_date_str,
417
+ # 'shape_type': shape_type,
418
+ # 'file_upload': file_upload,
419
+ # 'kernel_size': kernel_size,
420
+ # 'include_boundary': include_boundary
421
+ # }
422
+
423
+ # # Function to calculate custom formula
424
+ # def calculate_custom_formula(image, geometry, selected_bands, custom_formula, reducer_choice, scale=30):
425
+ # try:
426
+ # band_values = {}
427
+ # band_names = image.bandNames().getInfo()
428
+
429
+ # for band in selected_bands:
430
+ # if band not in band_names:
431
+ # raise ValueError(f"Band '{band}' not found in the dataset.")
432
+ # band_values[band] = image.select(band)
433
+
434
+ # reducer = get_reducer(reducer_choice)
435
+ # reduced_values = {}
436
+ # for band in selected_bands:
437
+ # value = band_values[band].reduceRegion(
438
+ # reducer=reducer,
439
+ # geometry=geometry,
440
+ # scale=scale
441
+ # ).get(band).getInfo()
442
+ # reduced_values[band] = float(value if value is not None else 0)
443
+
444
+ # formula = custom_formula
445
+ # for band in selected_bands:
446
+ # formula = formula.replace(band, str(reduced_values[band]))
447
+
448
+ # result = eval(formula, {"__builtins__": {}}, reduced_values)
449
+ # if not isinstance(result, (int, float)):
450
+ # raise ValueError("Formula did not result in a numeric value.")
451
+
452
+ # return ee.Image.constant(result).rename('custom_result')
453
+
454
+ # except ZeroDivisionError:
455
+ # st.error("Error: Division by zero in the formula.")
456
+ # return ee.Image(0).rename('custom_result').set('error', 'Division by zero')
457
+ # except SyntaxError:
458
+ # st.error(f"Error: Invalid syntax in formula '{custom_formula}'.")
459
+ # return ee.Image(0).rename('custom_result').set('error', 'Invalid syntax')
460
+ # except ValueError as e:
461
+ # st.error(f"Error: {str(e)}")
462
+ # return ee.Image(0).rename('custom_result').set('error', str(e))
463
+ # except Exception as e:
464
+ # st.error(f"Unexpected error: {e}")
465
+ # return ee.Image(0).rename('custom_result').set('error', str(e))
466
+
467
+ # # Function to calculate index for a period
468
+ # def calculate_index_for_period(image, roi, selected_bands, custom_formula, reducer_choice):
469
+ # return calculate_custom_formula(image, roi, selected_bands, custom_formula, reducer_choice)
470
+
471
+ # # Aggregation functions
472
+ # def aggregate_data_custom(collection):
473
+ # collection = collection.map(lambda image: image.set('day', ee.Date(image.get('system:time_start')).format('YYYY-MM-dd')))
474
+ # grouped_by_day = collection.aggregate_array('day').distinct()
475
+ # def calculate_daily_mean(day):
476
+ # daily_collection = collection.filter(ee.Filter.eq('day', day))
477
+ # daily_mean = daily_collection.mean()
478
+ # return daily_mean.set('day', day)
479
+ # daily_images = ee.List(grouped_by_day.map(calculate_daily_mean))
480
+ # return ee.ImageCollection(daily_images)
481
+
482
+ # def aggregate_data_weekly(collection):
483
+ # def set_week_start(image):
484
+ # date = ee.Date(image.get('system:time_start'))
485
+ # days_since_week_start = date.getRelative('day', 'week')
486
+ # offset = ee.Number(days_since_week_start).multiply(-1)
487
+ # week_start = date.advance(offset, 'day')
488
+ # return image.set('week_start', week_start.format('YYYY-MM-dd'))
489
+ # collection = collection.map(set_week_start)
490
+ # grouped_by_week = collection.aggregate_array('week_start').distinct()
491
+ # def calculate_weekly_mean(week_start):
492
+ # weekly_collection = collection.filter(ee.Filter.eq('week_start', week_start))
493
+ # weekly_mean = weekly_collection.mean()
494
+ # return weekly_mean.set('week_start', week_start)
495
+ # weekly_images = ee.List(grouped_by_week.map(calculate_weekly_mean))
496
+ # return ee.ImageCollection(weekly_images)
497
+
498
+ # def aggregate_data_monthly(collection, start_date, end_date):
499
+ # collection = collection.filterDate(start_date, end_date)
500
+ # collection = collection.map(lambda image: image.set('month', ee.Date(image.get('system:time_start')).format('YYYY-MM')))
501
+ # grouped_by_month = collection.aggregate_array('month').distinct()
502
+ # def calculate_monthly_mean(month):
503
+ # monthly_collection = collection.filter(ee.Filter.eq('month', month))
504
+ # monthly_mean = monthly_collection.mean()
505
+ # return monthly_mean.set('month', month)
506
+ # monthly_images = ee.List(grouped_by_month.map(calculate_monthly_mean))
507
+ # return ee.ImageCollection(monthly_images)
508
+
509
+ # def aggregate_data_yearly(collection):
510
+ # collection = collection.map(lambda image: image.set('year', ee.Date(image.get('system:time_start')).format('YYYY')))
511
+ # grouped_by_year = collection.aggregate_array('year').distinct()
512
+ # def calculate_yearly_mean(year):
513
+ # yearly_collection = collection.filter(ee.Filter.eq('year', year))
514
+ # yearly_mean = yearly_collection.mean()
515
+ # return yearly_mean.set('year', year)
516
+ # yearly_images = ee.List(grouped_by_year.map(calculate_yearly_mean))
517
+ # return ee.ImageCollection(yearly_images)
518
+
519
+ # # Process aggregation function
520
+ # def process_aggregation(locations_df, start_date_str, end_date_str, dataset_id, selected_bands, reducer_choice, shape_type, aggregation_period, custom_formula="", kernel_size=None, include_boundary=None):
521
+ # aggregated_results = []
522
+
523
+ # if not custom_formula:
524
+ # st.error("Custom formula cannot be empty. Please provide a formula.")
525
+ # return aggregated_results
526
+
527
+ # total_steps = len(locations_df)
528
+ # progress_bar = st.progress(0)
529
+ # progress_text = st.empty()
530
+
531
+ # with st.spinner('Processing data...'):
532
+ # if shape_type.lower() == "point":
533
+ # for idx, row in locations_df.iterrows():
534
+ # latitude = row.get('latitude')
535
+ # longitude = row.get('longitude')
536
+ # if pd.isna(latitude) or pd.isna(longitude):
537
+ # st.warning(f"Skipping location {idx} with missing latitude or longitude")
538
+ # continue
539
+
540
+ # location_name = row.get('name', f"Location_{idx}")
541
+
542
+ # if kernel_size == "3x3 Kernel":
543
+ # buffer_size = 45 # 90m x 90m
544
+ # roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
545
+ # elif kernel_size == "5x5 Kernel":
546
+ # buffer_size = 75 # 150m x 150m
547
+ # roi = ee.Geometry.Point([longitude, latitude]).buffer(buffer_size).bounds()
548
+ # else: # Point
549
+ # roi = ee.Geometry.Point([longitude, latitude])
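+ # (Buffer arithmetic, for reference: a 45 m buffer with .bounds() spans a
+ # ~90 m x 90 m square, i.e. about 3x3 pixels at the 30 m reduceRegion scale;
+ # a 75 m buffer spans ~150 m x 150 m, about 5x5 pixels.)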
550
+
551
+ # collection = ee.ImageCollection(dataset_id) \
552
+ # .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
553
+ # .filterBounds(roi)
554
+
555
+ # if aggregation_period.lower() == 'custom (start date to end date)':
556
+ # collection = aggregate_data_custom(collection)
557
+ # elif aggregation_period.lower() == 'weekly':
558
+ # collection = aggregate_data_weekly(collection)
559
+ # elif aggregation_period.lower() == 'monthly':
560
+ # collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
561
+ # elif aggregation_period.lower() == 'yearly':
562
+ # collection = aggregate_data_yearly(collection)
563
+
564
+ # image_list = collection.toList(collection.size())
565
+ # processed_weeks = set()
566
+ # for i in range(image_list.size().getInfo()):
567
+ # image = ee.Image(image_list.get(i))
568
+
569
+ # if aggregation_period.lower() == 'custom (start date to end date)':
570
+ # timestamp = image.get('day')
571
+ # period_label = 'Date'
572
+ # date = ee.Date(timestamp).format('YYYY-MM-dd').getInfo()
573
+ # elif aggregation_period.lower() == 'weekly':
574
+ # timestamp = image.get('week_start')
575
+ # period_label = 'Week'
576
+ # date = ee.String(timestamp).getInfo()
577
+ # if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
578
+ # pd.to_datetime(date) > pd.to_datetime(end_date_str) or
579
+ # date in processed_weeks):
580
+ # continue
581
+ # processed_weeks.add(date)
582
+ # elif aggregation_period.lower() == 'monthly':
583
+ # timestamp = image.get('month')
584
+ # period_label = 'Month'
585
+ # date = ee.Date(timestamp).format('YYYY-MM').getInfo()
586
+ # elif aggregation_period.lower() == 'yearly':
587
+ # timestamp = image.get('year')
588
+ # period_label = 'Year'
589
+ # date = ee.Date(timestamp).format('YYYY').getInfo()
590
+
591
+ # index_image = calculate_index_for_period(image, roi, selected_bands, custom_formula, reducer_choice)
592
+
593
+ # try:
594
+ # index_value = index_image.reduceRegion(
595
+ # reducer=get_reducer(reducer_choice),
596
+ # geometry=roi,
597
+ # scale=30
598
+ # ).get('custom_result')
599
+
600
+ # calculated_value = index_value.getInfo()
601
+
602
+ # if isinstance(calculated_value, (int, float)):
603
+ # aggregated_results.append({
604
+ # 'Location Name': location_name,
605
+ # 'Latitude': latitude,
606
+ # 'Longitude': longitude,
607
+ # period_label: date,
608
+ # 'Start Date': start_date_str,
609
+ # 'End Date': end_date_str,
610
+ # 'Calculated Value': calculated_value
611
+ # })
612
+ # else:
613
+ # st.warning(f"Skipping invalid value for {location_name} on {date}")
614
+ # except Exception as e:
615
+ # st.error(f"Error retrieving value for {location_name}: {e}")
616
+
617
+ # progress_percentage = (idx + 1) / total_steps
618
+ # progress_bar.progress(progress_percentage)
619
+ # progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
620
+
621
+ # elif shape_type.lower() == "polygon":
622
+ # for idx, row in locations_df.iterrows():
623
+ # polygon_name = row.get('name', f"Polygon_{idx}")
624
+ # polygon_geometry = row.get('geometry')
625
+ # location_name = polygon_name
626
+
627
+ # try:
628
+ # roi = convert_to_ee_geometry(polygon_geometry)
629
+ # if not include_boundary:
630
+ # roi = roi.buffer(-30).bounds()
631
+ # except ValueError as e:
632
+ # st.warning(f"Skipping invalid polygon {polygon_name}: {e}")
633
+ # continue
634
+
635
+ # collection = ee.ImageCollection(dataset_id) \
636
+ # .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
637
+ # .filterBounds(roi)
638
+
639
+ # if aggregation_period.lower() == 'custom (start date to end date)':
640
+ # collection = aggregate_data_custom(collection)
641
+ # elif aggregation_period.lower() == 'weekly':
642
+ # collection = aggregate_data_weekly(collection)
643
+ # elif aggregation_period.lower() == 'monthly':
644
+ # collection = aggregate_data_monthly(collection, start_date_str, end_date_str)
645
+ # elif aggregation_period.lower() == 'yearly':
646
+ # collection = aggregate_data_yearly(collection)
647
+
648
+ # image_list = collection.toList(collection.size())
649
+ # processed_weeks = set()
650
+ # for i in range(image_list.size().getInfo()):
651
+ # image = ee.Image(image_list.get(i))
652
+
653
+ # if aggregation_period.lower() == 'custom (start date to end date)':
654
+ # timestamp = image.get('day')
655
+ # period_label = 'Date'
656
+ # date = ee.Date(timestamp).format('YYYY-MM-dd').getInfo()
657
+ # elif aggregation_period.lower() == 'weekly':
658
+ # timestamp = image.get('week_start')
659
+ # period_label = 'Week'
660
+ # date = ee.String(timestamp).getInfo()
661
+ # if (pd.to_datetime(date) < pd.to_datetime(start_date_str) or
662
+ # pd.to_datetime(date) > pd.to_datetime(end_date_str) or
663
+ # date in processed_weeks):
664
+ # continue
665
+ # processed_weeks.add(date)
666
+ # elif aggregation_period.lower() == 'monthly':
667
+ # timestamp = image.get('month')
668
+ # period_label = 'Month'
669
+ # date = ee.Date(timestamp).format('YYYY-MM').getInfo()
670
+ # elif aggregation_period.lower() == 'yearly':
671
+ # timestamp = image.get('year')
672
+ # period_label = 'Year'
673
+ # date = ee.Date(timestamp).format('YYYY').getInfo()
674
+
675
+ # index_image = calculate_index_for_period(image, roi, selected_bands, custom_formula, reducer_choice)
676
+
677
+ # try:
678
+ # index_value = index_image.reduceRegion(
679
+ # reducer=get_reducer(reducer_choice),
680
+ # geometry=roi,
681
+ # scale=30
682
+ # ).get('custom_result')
683
+
684
+ # calculated_value = index_value.getInfo()
685
+
686
+ # if isinstance(calculated_value, (int, float)):
687
+ # aggregated_results.append({
688
+ # 'Location Name': location_name,
689
+ # period_label: date,
690
+ # 'Start Date': start_date_str,
691
+ # 'End Date': end_date_str,
692
+ # 'Calculated Value': calculated_value
693
+ # })
694
+ # else:
695
+ # st.warning(f"Skipping invalid value for {location_name} on {date}")
696
+ # except Exception as e:
697
+ # st.error(f"Error retrieving value for {location_name}: {e}")
698
+
699
+ # progress_percentage = (idx + 1) / total_steps
700
+ # progress_bar.progress(progress_percentage)
701
+ # progress_text.markdown(f"Processing: {int(progress_percentage * 100)}%")
702
+
703
+ # if aggregated_results:
704
+ # result_df = pd.DataFrame(aggregated_results)
705
+ # if aggregation_period.lower() == 'custom (start date to end date)':
706
+ # agg_dict = {
707
+ # 'Start Date': 'first',
708
+ # 'End Date': 'first',
709
+ # 'Calculated Value': 'mean'
710
+ # }
711
+ # if shape_type.lower() == 'point':
712
+ # agg_dict['Latitude'] = 'first'
713
+ # agg_dict['Longitude'] = 'first'
714
+ # aggregated_output = result_df.groupby('Location Name').agg(agg_dict).reset_index()
715
+ # aggregated_output.rename(columns={'Calculated Value': 'Aggregated Value'}, inplace=True)
716
+ # return aggregated_output.to_dict(orient='records')
717
+ # else:
718
+ # return result_df.to_dict(orient='records')
719
+ # return []
720
+
721
+ # # Button to trigger calculation
722
+ # if st.button(f"Calculate {custom_formula}"):
723
+ # if file_upload is not None:
724
+ # if shape_type.lower() in ["point", "polygon"]:
725
+ # results = process_aggregation(
726
+ # locations_df,
727
+ # start_date_str,
728
+ # end_date_str,
729
+ # dataset_id,
730
+ # selected_bands,
731
+ # reducer_choice,
732
+ # shape_type,
733
+ # aggregation_period,
734
+ # custom_formula,
735
+ # kernel_size=kernel_size,
736
+ # include_boundary=include_boundary
737
+ # )
738
+ # if results:
739
+ # result_df = pd.DataFrame(results)
740
+ # st.write(f"Processed Results Table ({aggregation_period}) for Formula: {custom_formula}")
741
+ # st.dataframe(result_df)
742
+ # filename = f"{main_selection}_{dataset_id}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{aggregation_period.lower()}.csv"
743
+ # st.download_button(
744
+ # label="Download results as CSV",
745
+ # data=result_df.to_csv(index=False).encode('utf-8'),
746
+ # file_name=filename,
747
+ # mime='text/csv'
748
+ # )
749
+ # # Show an example calculation
750
+ # if st.session_state.show_example and results:
751
+ # example_result = results[0]
752
+ # example_image = ee.ImageCollection(dataset_id).filterDate(start_date_str, end_date_str).first()
753
+ # example_roi = (
754
+ # ee.Geometry.Point([example_result['Longitude'], example_result['Latitude']])
755
+ # if shape_type.lower() == 'point'
756
+ # else convert_to_ee_geometry(locations_df['geometry'].iloc[0])
757
+ # )
758
+ # example_values = {}
759
+ # for band in selected_bands:
760
+ # value = example_image.select(band).reduceRegion(
761
+ # reducer=get_reducer(reducer_choice),
762
+ # geometry=example_roi,
763
+ # scale=30
764
+ # ).get(band).getInfo()
765
+ # example_values[band] = float(value if value is not None else 0)
766
+ # example_formula = custom_formula
767
+ # for band in selected_bands:
768
+ # example_formula = example_formula.replace(band, str(example_values[band]))
769
+ # # st.write(f"Example Calculation: {custom_formula} -> {example_formula} = {example_result.get('Calculated Value', example_result.get('Aggregated Value'))}")
770
+ # st.session_state.show_example = False
771
+ # st.success('Processing complete!')
772
+ # else:
773
+ # st.warning("No results were generated. Check your inputs or formula.")
774
+ # else:
775
+ # st.warning("Please upload a file to process.")
776
+ # else:
777
+ # st.warning("Please upload a file to proceed.")
778
+
779
+
780
+
781
  import streamlit as st
782
  import json
783
  import ee
 
788
  import leafmap.foliumap as leafmap
789
  import re
790
  from shapely.geometry import base
791
+ # from lxml import etree
792
+ # from xml.etree import ElementTree as ET
793
+ from xml.etree import ElementTree as XET
794
+
795
 
796
  # Set up the page layout
797
  st.set_page_config(layout="wide")
 
822
  # Title
823
  st.markdown(
824
  f"""
825
+ <div style="display: flex; flex-direction: column; align-items: center;">
826
+ <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SATRANG.png" style="width: 30%;">
827
+ <h3 style="text-align: center; margin: 0;">( Spatial and Temporal Aggregation for Remote-sensing and Analysis of Natural Geodata )</h3>
828
+ </div>
829
+ <hr>
830
  """,
831
  unsafe_allow_html=True,
832
  )
833
+ # st.markdown(
834
+ # f"""
835
+ # <div style="text-align: center; background-image: url('https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/B1.jpg'); background-size: cover; padding: 20px;">
836
+ # <h1 style="display: inline-block; margin: 0;">
837
+ # <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/B1.png" style="width: 20%; vertical-align: middle; margin-right: 10px;">
838
+ # BHOOMI
839
+ # </h1>
840
+ # <h3 style="margin: 0;">(Bandwise Harmonization & Optimized Output for multispectral integration)</h3>
841
+ # </div>
842
+ # <hr>
843
+ # """,
844
+ # unsafe_allow_html=True,
845
+ # )
846
+ # st.write("<h4><div style='text-align: center;'>User Inputs</div></h4>", unsafe_allow_html=True)
847
+
848
+ # st.markdown(
849
+ # f"""
850
+ # <div style="position: relative; text-align: center; padding: 20px;">
851
+ # <div style="background-image: url('https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/B1.jpg'); background-size: cover; position: absolute; top: 0; left: 0; right: 0; bottom: 0; z-index: 1;"></div>
852
+ # <div style="background-color: rgba(255, 255, 255, 0.2); position: absolute; top: 0; left: 0; right: 0; bottom: 0; z-index: 2;"></div>
853
+ # <div style="position: relative; z-index: 3;">
854
+ # <div style="display: flex; justify-content: space-between; align-items: center;">
855
+ # <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
856
+ # <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
857
+ # </div>
858
+ # <h1 style="display: inline-block; margin: 0;">
859
+ # BHOOMI
860
+ # </h1>
861
+ # <h3 style="margin: 0;">(Bandwise Harmonization & Optimized Output for multispectral integration)</h3>
862
+ # </div>
863
+ # </div>
864
+ # <hr>
865
+ # """,
866
+ # unsafe_allow_html=True,
867
+ # )
868
 
869
+ st.markdown(
870
+ f"""
871
+ <h4 style="text-align: center;">User Inputs</h4>
872
+ """,
873
+ unsafe_allow_html=True,
874
+ )
875
  # Authenticate and initialize Earth Engine
876
  earthengine_credentials = os.environ.get("EE_Authentication")
877
 
 
882
 
883
  ee.Initialize(project='ee-yashsacisro24')
884
 
885
+ st.write("<h5>Image Collection</h5>", unsafe_allow_html=True)
886
+
887
  # Imagery base selection
888
  imagery_base = st.selectbox("Select Imagery Base", ["Sentinel", "Landsat", "MODIS", "Custom Input"], index=0)
889
 
 
925
  data = {}
926
 
927
  # Display the title for the Streamlit app
928
+ # st.title(f"{imagery_base} Dataset")
929
+ st.markdown(
930
+ f"""
931
+ <hr>
932
+ <h5><b>{imagery_base} Dataset</b></h5>
933
+ """,
934
+ unsafe_allow_html=True,
935
+ )
936
  # Select dataset category (main selection)
937
  if data:
938
  main_selection = st.selectbox(f"Select {imagery_base} Dataset Category", list(data.keys()))
 
948
  sub_options = data[main_selection]["sub_options"]
949
  sub_selection = st.selectbox(f"Select Specific {imagery_base} Dataset ID", list(sub_options.keys()))
950
 
951
+ # Display the selected dataset ID based on user input
952
  if sub_selection:
953
  st.write(f"You selected: {main_selection} -> {sub_options[sub_selection]}")
954
  st.write(f"Dataset ID: {sub_selection}")
955
  dataset_id = sub_selection # Use the key directly as the dataset ID
956
 
957
  # Earth Engine Index Calculator Section
958
+ # st.header("Earth Engine Index Calculator")
959
 
960
+ st.markdown(
961
+ f"""
962
+ <hr>
963
+ <h5><b>Earth Engine Index Calculator</b></h5>
964
+ """,
965
+ unsafe_allow_html=True,
966
+ )
967
  # Load band information based on selected dataset
968
  if main_selection and sub_selection:
969
  dataset_bands = data[main_selection]["bands"].get(sub_selection, [])
 
1019
 
1020
  # Display the validated formula
1021
  st.write(f"Custom Formula: {custom_formula}")
1022
+
1023
+ # The rest of your code (reducer, geometry conversion, date input, aggregation, etc.) remains unchanged...
1024
 
1025
  # Function to get the corresponding reducer based on user input
1026
  def get_reducer(reducer_name):
 
1041
  index=0 # Default to 'mean'
1042
  )
1043
 
1044
+ # # Function to convert geometry to Earth Engine format
1045
+ # def convert_to_ee_geometry(geometry):
1046
+ # if isinstance(geometry, base.BaseGeometry):
1047
+ # if geometry.is_valid:
1048
+ # geojson = geometry.__geo_interface__
1049
+ # return ee.Geometry(geojson)
1050
+ # else:
1051
+ # raise ValueError("Invalid geometry: The polygon geometry is not valid.")
1052
+ # elif isinstance(geometry, dict) or isinstance(geometry, str):
1053
+ # try:
1054
+ # if isinstance(geometry, str):
1055
+ # geometry = json.loads(geometry)
1056
+ # if 'type' in geometry and 'coordinates' in geometry:
1057
+ # return ee.Geometry(geometry)
1058
+ # else:
1059
+ # raise ValueError("GeoJSON format is invalid.")
1060
+ # except Exception as e:
1061
+ # raise ValueError(f"Error parsing GeoJSON: {e}")
1062
+ # elif isinstance(geometry, str) and geometry.lower().endswith(".kml"):
1063
+ # try:
1064
+ # tree = ET.parse(geometry)
1065
+ # kml_root = tree.getroot()
1066
+ # kml_namespace = {'kml': 'http://www.opengis.net/kml/2.2'}
1067
+ # coordinates = kml_root.findall(".//kml:coordinates", kml_namespace)
1068
+ # if coordinates:
1069
+ # coords_text = coordinates[0].text.strip()
1070
+ # coords = coords_text.split()
1071
+ # coords = [tuple(map(float, coord.split(','))) for coord in coords]
1072
+ # geojson = {"type": "Polygon", "coordinates": [coords]}
1073
+ # return ee.Geometry(geojson)
1074
+ # else:
1075
+ # raise ValueError("KML does not contain valid coordinates.")
1076
+ # except Exception as e:
1077
+ # raise ValueError(f"Error parsing KML: {e}")
1078
+ # else:
1079
+ # raise ValueError("Unsupported geometry input type. Supported types are Shapely, GeoJSON, and KML.")
1080
+
1081
  # Function to convert geometry to Earth Engine format
1082
  def convert_to_ee_geometry(geometry):
1083
+ st.write(f"Debug: convert_to_ee_geometry called with type - {type(geometry)}") # Debug input type
1084
  if isinstance(geometry, base.BaseGeometry):
1085
  if geometry.is_valid:
1086
  geojson = geometry.__geo_interface__
1087
+ st.write(f"Debug: Converting Shapely geometry to GeoJSON - {geojson}") # Debug GeoJSON
1088
  return ee.Geometry(geojson)
1089
  else:
1090
  raise ValueError("Invalid geometry: The polygon geometry is not valid.")
1091
+ elif isinstance(geometry, dict):
1092
+ if 'type' in geometry and 'coordinates' in geometry:
1093
+ return ee.Geometry(geometry)
1094
+ else:
1095
+ raise ValueError("GeoJSON format is invalid.")
1096
+ elif isinstance(geometry, str):
1097
  try:
1098
+ # If it’s a JSON string, parse it
1099
+ parsed = json.loads(geometry)
1100
+ if 'type' in parsed and 'coordinates' in parsed:
1101
+ return ee.Geometry(parsed)
1102
  else:
1103
+ raise ValueError("GeoJSON string format is invalid.")
1104
+ except json.JSONDecodeError:
1105
+ # If it’s a KML string (not a file path)
1106
+ try:
1107
+ root = XET.fromstring(geometry)
1108
+ ns = {'kml': 'http://www.opengis.net/kml/2.2'}
1109
+ coords_elem = root.find('.//kml:Polygon//kml:coordinates', ns)
1110
+ if coords_elem is not None:
1111
+ coords_text = ' '.join(coords_elem.text.split())
1112
+ st.write(f"Debug: KML string coordinates - {coords_text}") # Debug KML parsing
1113
+ coords = [tuple(map(float, coord.split(','))) for coord in coords_text.split()]
1114
+ geojson = {"type": "Polygon", "coordinates": [coords]}
1115
+ return ee.Geometry(geojson)
1116
+ else:
1117
+ raise ValueError("KML string does not contain valid coordinates.")
1118
+ except Exception as e:
1119
+ raise ValueError(f"Error parsing KML string: {e}")
1120
  else:
1121
+ raise ValueError(f"Unsupported geometry input type: {type(geometry)}. Supported types are Shapely, GeoJSON, and KML string.")
1122
+
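+ # Illustrative sketch only; the sample coordinates and the shapely Polygon
+ # import are assumptions for demonstration, not app data:
+ # from shapely.geometry import Polygon
+ # roi_a = convert_to_ee_geometry(Polygon([(77.0, 28.0), (77.1, 28.0), (77.1, 28.1)]))  # Shapely geometry
+ # roi_b = convert_to_ee_geometry({"type": "Point", "coordinates": [77.0, 28.0]})  # GeoJSON dict
+ # roi_c = convert_to_ee_geometry('{"type": "Point", "coordinates": [77.0, 28.0]}')  # GeoJSON string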
1123
  # Date Input for Start and End Dates
1124
  start_date = st.date_input("Start Date", value=pd.to_datetime('2024-11-01'))
1125
  end_date = st.date_input("End Date", value=pd.to_datetime('2024-12-01'))
 
1155
  help="Check to include pixels on the polygon boundary; uncheck to exclude them."
1156
  )
1157
 
1158
+ # # Ask user to upload a file based on shape type
1159
+ # file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
1160
+
1161
+ # if file_upload is not None:
1162
+ # # Read the user-uploaded file
1163
+ # if shape_type.lower() == "point":
1164
+ # if file_upload.name.endswith('.csv'):
1165
+ # locations_df = pd.read_csv(file_upload)
1166
+ # elif file_upload.name.endswith('.geojson'):
1167
+ # locations_df = gpd.read_file(file_upload)
1168
+ # elif file_upload.name.endswith('.kml'):
1169
+ # locations_df = gpd.read_file(file_upload)
1170
+ # else:
1171
+ # st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
1172
+ # locations_df = pd.DataFrame()
1173
+
1174
+ # if 'geometry' in locations_df.columns:
1175
+ # if locations_df.geometry.geom_type.isin(['Polygon', 'MultiPolygon']).any():
1176
+ # st.warning("The uploaded file contains polygon data. Please select 'Polygon' for processing.")
1177
+ # st.stop()
1178
+
1179
+ # with st.spinner('Processing Map...'):
1180
+ # if locations_df is not None and not locations_df.empty:
1181
+ # if 'geometry' in locations_df.columns:
1182
+ # locations_df['latitude'] = locations_df['geometry'].y
1183
+ # locations_df['longitude'] = locations_df['geometry'].x
1184
+
1185
+ # if 'latitude' not in locations_df.columns or 'longitude' not in locations_df.columns:
1186
+ # st.error("Uploaded file is missing required 'latitude' or 'longitude' columns.")
1187
+ # else:
1188
+ # st.write("Preview of the uploaded points data:")
1189
+ # st.dataframe(locations_df.head())
1190
+ # m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
1191
+ # for _, row in locations_df.iterrows():
1192
+ # latitude = row['latitude']
1193
+ # longitude = row['longitude']
1194
+ # if pd.isna(latitude) or pd.isna(longitude):
1195
+ # continue
1196
+ # m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
1197
+ # st.write("Map of Uploaded Points:")
1198
+ # m.to_streamlit()
1199
+ # st.session_state.map_data = m
1200
+
1201
+ # elif shape_type.lower() == "polygon":
1202
+ # if file_upload.name.endswith('.csv'):
1203
+ # locations_df = pd.read_csv(file_upload)
1204
+ # elif file_upload.name.endswith('.geojson'):
1205
+ # locations_df = gpd.read_file(file_upload)
1206
+ # elif file_upload.name.endswith('.kml'):
1207
+ # locations_df = gpd.read_file(file_upload)
1208
+ # else:
1209
+ # st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
1210
+ # locations_df = pd.DataFrame()
1211
+
1212
+ # if 'geometry' in locations_df.columns:
1213
+ # if locations_df.geometry.geom_type.isin(['Point', 'MultiPoint']).any():
1214
+ # st.warning("The uploaded file contains point data. Please select 'Point' for processing.")
1215
+ # st.stop()
1216
+
1217
+ # with st.spinner('Processing Map...'):
1218
+ # if locations_df is not None and not locations_df.empty:
1219
+ # if 'geometry' not in locations_df.columns:
1220
+ # st.error("Uploaded file is missing required 'geometry' column.")
1221
+ # else:
1222
+ # st.write("Preview of the uploaded polygons data:")
1223
+ # st.dataframe(locations_df.head())
1224
+ # centroid_lat = locations_df.geometry.centroid.y.mean()
1225
+ # centroid_lon = locations_df.geometry.centroid.x.mean()
1226
+ # m = leafmap.Map(center=[centroid_lat, centroid_lon], zoom=10)
1227
+ # for _, row in locations_df.iterrows():
1228
+ # polygon = row['geometry']
1229
+ # if polygon.is_valid:
1230
+ # gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=locations_df.crs)
1231
+ # m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))
1232
+ # st.write("Map of Uploaded Polygons:")
1233
+ # m.to_streamlit()
1234
+ # st.session_state.map_data = m
1235
+
1236
+
1237
  # Ask user to upload a file based on shape type
1238
  file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
1239
 
 
1245
  elif file_upload.name.endswith('.geojson'):
1246
  locations_df = gpd.read_file(file_upload)
1247
  elif file_upload.name.endswith('.kml'):
1248
+ # Parse KML file for point data
1249
+ kml_string = file_upload.read().decode('utf-8')
1250
+ try:
1251
+ # Use xml.etree.ElementTree with unique alias
1252
+ root = XET.fromstring(kml_string)
1253
+ ns = {'kml': 'http://www.opengis.net/kml/2.2'}
1254
+ points = []
1255
+ for placemark in root.findall('.//kml:Placemark', ns):
1256
+ name = placemark.findtext('kml:name', default=f"Point_{len(points)}", namespaces=ns)
1257
+ coords_elem = placemark.find('.//kml:Point/kml:coordinates', ns)
1258
+ if coords_elem is not None:
1259
+ coords_text = coords_elem.text.strip()
1260
+ st.write(f"Debug: Point coordinates found - {coords_text}") # Debug output
1261
+ coords = [c.strip() for c in coords_text.split(',')]
1262
+ if len(coords) >= 2: # Ensure at least lon, lat
1263
+ lon, lat = float(coords[0]), float(coords[1])
1264
+ points.append({'name': name, 'geometry': f"POINT ({lon} {lat})"})
1265
+ if not points:
1266
+ st.error("No valid Point data found in the KML file.")
1267
+ locations_df = pd.DataFrame()
1268
+ else:
1269
+ locations_df = gpd.GeoDataFrame(points, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in points]), crs="EPSG:4326")
1270
+ except Exception as e:
1271
+ st.error(f"Error parsing KML file: {str(e)}")
1272
+ locations_df = pd.DataFrame()
1273
  else:
1274
  st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
1275
  locations_df = pd.DataFrame()
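  # Note on the KML point branch above: KML stores coordinates as "lon,lat[,alt]",
  # so coords[0] is longitude and coords[1] latitude; each Placemark becomes a
  # WKT "POINT (lon lat)" row in an EPSG:4326 GeoDataFrame.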
 
1307
  elif file_upload.name.endswith('.geojson'):
1308
  locations_df = gpd.read_file(file_upload)
1309
  elif file_upload.name.endswith('.kml'):
1310
+ # Parse KML file for polygon data
1311
+ kml_string = file_upload.read().decode('utf-8')
1312
+ try:
1313
+ root = XET.fromstring(kml_string)
1314
+ ns = {'kml': 'http://www.opengis.net/kml/2.2'}
1315
+ polygons = []
1316
+ for placemark in root.findall('.//kml:Placemark', ns):
1317
+ name = placemark.findtext('kml:name', default=f"Polygon_{len(polygons)}", namespaces=ns)
1318
+ coords_elem = placemark.find('.//kml:Polygon//kml:coordinates', ns)
1319
+ if coords_elem is not None:
1320
+ coords_text = ' '.join(coords_elem.text.split()) # Normalize whitespace
1321
+ st.write(f"Debug: Polygon coordinates found - {coords_text}") # Debug output
1322
+ coord_pairs = [pair.split(',')[:2] for pair in coords_text.split() if pair]
1323
+ if len(coord_pairs) >= 4: # Minimum 4 points for a closed polygon
1324
+ coords_str = " ".join([f"{float(lon)} {float(lat)}" for lon, lat in coord_pairs])
1325
+ polygons.append({'name': name, 'geometry': f"POLYGON (({coords_str}))"})
1326
+ if not polygons:
1327
+ st.error("No valid Polygon data found in the KML file.")
1328
+ locations_df = pd.DataFrame()
1329
+ else:
1330
+ locations_df = gpd.GeoDataFrame(polygons, geometry=gpd.GeoSeries.from_wkt([p['geometry'] for p in polygons]), crs="EPSG:4326")
1331
+ except Exception as e:
1332
+ st.error(f"Error parsing KML file: {str(e)}")
1333
+ locations_df = pd.DataFrame()
1334
  else:
1335
  st.error("Unsupported file format. Please upload CSV, GeoJSON, or KML.")
1336
  locations_df = pd.DataFrame()
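  # Note on the KML polygon branch above: only the first kml:coordinates under
  # each kml:Polygon (normally the outer ring) is read, any altitude value in a
  # "lon,lat,alt" triplet is dropped, and at least four lon/lat pairs are
  # required so the resulting "POLYGON ((...))" WKT describes a closed ring.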
 
1359
  m.to_streamlit()
1360
  st.session_state.map_data = m
1361
 
1362
+ # ... (Rest of the code until convert_to_ee_geometry) ...
1363
  # Initialize session state for storing results
1364
  if 'results' not in st.session_state:
1365
  st.session_state.results = []