import streamlit as st
import json
import ee
import os
import pandas as pd
import geopandas as gpd
import leafmap.foliumap as leafmap

# Set up the page layout
st.set_page_config(layout="wide")

# Custom button styling
st.markdown(
    """
    <style>
    div.stButton > button:first-child {
        background-color: #006400;
        color:#ffffff;
    }
    </style>""",
    unsafe_allow_html=True,
)

# Logo
st.write(
    f"""
    <div style="display: flex; justify-content: space-between; align-items: center;">
        <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/ISRO_Logo.png"  style="width: 20%; margin-right: auto;">
        <img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SAC_Logo.png"  style="width: 20%; margin-left: auto;">
    </div>
    """,
    unsafe_allow_html=True,
)

# Authenticate and initialize Earth Engine
earthengine_credentials = os.environ.get("EE_Authentication")
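# Note: EE_Authentication is assumed to hold the full contents of an Earth Engine
# credentials file (the refresh-token JSON produced by `earthengine authenticate`).
# If the secret is unset, os.environ.get() returns None and the write below raises a TypeError.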

# Initialize Earth Engine with secret credentials
os.makedirs(os.path.expanduser("~/.config/earthengine/"), exist_ok=True)
with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
    f.write(earthengine_credentials)

ee.Initialize(project='ee-yashsacisro24')

# Load Sentinel dataset options from JSON file
with open("sentinel_datasets.json") as f:
    data = json.load(f)
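
# Expected layout of sentinel_datasets.json (inferred from how it is used below;
# the real file may carry additional keys):
# {
#     "<Category name>": {
#         "sub_options": {
#             "<Dataset label>": "<Earth Engine ImageCollection ID>",
#             ...
#         }
#     },
#     ...
# }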

# Display the title and dataset selection
st.title("Sentinel Dataset")

# Select the dataset category and the specific dataset ID within it
main_selection = st.selectbox("Select Sentinel Dataset Category", list(data.keys()))

if main_selection:
    sub_options = data[main_selection]["sub_options"]
    sub_selection = st.selectbox("Select Specific Dataset ID", list(sub_options.keys()))

# Earth Engine Index Calculator Section
st.header("Earth Engine Index Calculator")

# Choose a built-in index or a custom formula (comparisons below are case-insensitive)
index_choice = st.selectbox("Select an Index or Enter Custom Formula", ['NDVI', 'NDWI', 'Average NO₂', 'Custom Formula'])

# Initialize custom_formula variable
custom_formula = ""

# Display corresponding formula based on the index selected (case-insensitive)
if index_choice.lower() == 'ndvi':
    st.write("Formula for NDVI: NDVI = (B8 - B4) / (B8 + B4)")
elif index_choice.lower() == 'ndwi':
    st.write("Formula for NDWI: NDWI = (B3 - B8) / (B3 + B8)")
elif index_choice.lower() == 'average no₂':
    st.write("Formula for Average NO₂: Average NO₂ = Mean(NO2 band)")
elif index_choice.lower() == 'custom formula':
    custom_formula = st.text_input("Enter Custom Formula (e.g., '(B5 - B4) / (B5 + B4)')")
    st.write(f"Custom Formula: {custom_formula}")  # Display the custom formula after the user inputs it

# Function to check if the polygon geometry is valid and convert it to the correct format
def convert_to_ee_geometry(geometry):
    # Ensure the polygon geometry is in the right format
    if geometry.is_valid:
        # Convert the geometry to GeoJSON format
        geojson = geometry.__geo_interface__
        # Convert to Earth Engine geometry
        return ee.Geometry(geojson)
    else:
        raise ValueError("Invalid geometry: The polygon geometry is not valid.")
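
# shapely geometries expose a GeoJSON-like mapping via __geo_interface__
# (e.g. {"type": "Polygon", "coordinates": [...]}), which ee.Geometry() accepts directly.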

# Function to read points from CSV
def read_csv(file_path):
    df = pd.read_csv(file_path)
    return df

# Function to read points from GeoJSON
def read_geojson(file_path):
    gdf = gpd.read_file(file_path)
    return gdf

# Function to read points from KML
def read_kml(file_path):
    gdf = gpd.read_file(file_path, driver='KML')
    return gdf
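
# Note: KML support depends on the GDAL/OGR KML driver being available; on some
# geopandas/fiona builds it has to be enabled explicitly, e.g.
#     gpd.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
# (an assumption about the runtime environment, not something this script configures).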

# Ask user whether they want to process 'Point' or 'Polygon' data (case-insensitive)
shape_type = st.selectbox("Do you want to process 'Point' or 'Polygon' data?", ["Point", "Polygon"])

# Ask user to upload a file based on shape type (case-insensitive)
file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])

# Date Input for Start and End Dates
start_date = st.date_input("Start Date", value=pd.to_datetime('2020-01-01'))
end_date = st.date_input("End Date", value=pd.to_datetime('2020-12-31'))

# Convert start_date and end_date to string format for Earth Engine
start_date_str = start_date.strftime('%Y-%m-%d')
end_date_str = end_date.strftime('%Y-%m-%d')

# Initialize session state for storing results if not already done
if 'results' not in st.session_state:
    st.session_state.results = []
if 'last_params' not in st.session_state:
    st.session_state.last_params = {}
if 'map_data' not in st.session_state:
    st.session_state.map_data = None  # Initialize map_data
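# Streamlit re-runs this script from top to bottom on every interaction;
# st.session_state is what keeps the accumulated results and the rendered map
# alive across those reruns.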

# Function to check if parameters have changed
def parameters_changed():
    return (
        st.session_state.last_params.get('main_selection') != main_selection or
        st.session_state.last_params.get('sub_selection') != sub_selection or
        st.session_state.last_params.get('index_choice') != index_choice or
        st.session_state.last_params.get('start_date_str') != start_date_str or
        st.session_state.last_params.get('end_date_str') != end_date_str
    )

# If parameters have changed, reset the results
if parameters_changed():
    st.session_state.results = []  # Clear the previous results
    # Update the last parameters to the current ones
    st.session_state.last_params = {
        'main_selection': main_selection,
        'sub_selection': sub_selection,
        'index_choice': index_choice,
        'start_date_str': start_date_str,
        'end_date_str': end_date_str
    }

# Function to perform index calculations
def calculate_ndvi(image, geometry):
    ndvi = image.normalizedDifference(['B8', 'B4']).rename('NDVI')
    result = ndvi.reduceRegion(
        reducer=ee.Reducer.mean(),
        geometry=geometry,
        scale=30
    )
    return result.get('NDVI')

def calculate_ndwi(image, geometry):
    ndwi = image.normalizedDifference(['B3', 'B8']).rename('NDWI')
    result = ndwi.reduceRegion(
        reducer=ee.Reducer.mean(),
        geometry=geometry,
        scale=30
    )
    return result.get('NDWI')

def calculate_avg_no2_sentinel5p(image, geometry):
    no2 = image.select('NO2').reduceRegion(
        reducer=ee.Reducer.mean(),
        geometry=geometry,
        scale=1000
    ).get('NO2')
    return no2
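
# Note: the band is assumed to be exported as 'NO2' in the configured dataset; on the
# standard Sentinel-5P products (e.g. COPERNICUS/S5P/NRTI/L3_NO2) the column band is
# named 'NO2_column_number_density', so the select() above and the band check further
# below may need adapting to the entry in sentinel_datasets.json.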

def calculate_custom_formula(image, geometry, formula):
    result = image.expression(formula).rename('Custom Index').reduceRegion(
        reducer=ee.Reducer.mean(),
        geometry=geometry,
        scale=30
    )
    return result.get('Custom Index')
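
# In ee.Image.expression(), bare identifiers such as B4 or B8 resolve to band names of
# the input image, so a custom formula must reference bands that exist in the selected
# collection. All of the calculators above return server-side values; they are only
# materialised on the client later via getInfo().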

# Process each point or polygon
if file_upload:
    locations_df = None  # Initialize locations_df to None
    polygons_df = None   # Initialize polygons_df to None

    file_extension = os.path.splitext(file_upload.name)[1].lower()  # Convert extension to lowercase

    # Read file based on shape type (case-insensitive)
    if shape_type.lower() == 'point':
        if file_extension == '.csv':
            locations_df = read_csv(file_upload)
        elif file_extension == '.geojson':
            locations_df = read_geojson(file_upload)
        elif file_extension == '.kml':
            locations_df = read_kml(file_upload)
        else:
            st.error("Unsupported file type. Please upload a CSV, GeoJSON, or KML file for points.")

        # GeoJSON/KML points carry their coordinates in the geometry column, while the rest
        # of the app expects 'latitude'/'longitude' columns (as in the CSV case), so derive
        # them here when they are missing.
        if locations_df is not None and 'latitude' not in locations_df.columns and hasattr(locations_df, 'geometry'):
            locations_df['longitude'] = locations_df.geometry.x
            locations_df['latitude'] = locations_df.geometry.y
    elif shape_type.lower() == 'polygon':
        if file_extension == '.geojson':
            polygons_df = read_geojson(file_upload)
        elif file_extension == '.kml':
            polygons_df = read_kml(file_upload)
        else:
            st.error("Unsupported file type. Please upload a GeoJSON or KML file for polygons.")
    
    # Check if locations_df is populated for points
    if locations_df is not None:
        # Display a preview of the points data
        st.write("Preview of the uploaded points data:")
        st.dataframe(locations_df.head())

        # Create a LeafMap object to display the points
        m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)

        # Add points to the map using a loop
        for _, row in locations_df.iterrows():
            latitude = row['latitude']
            longitude = row['longitude']
            
            # Check if latitude or longitude are NaN and skip if they are
            if pd.isna(latitude) or pd.isna(longitude):
                continue  # Skip this row and move to the next one
            
            m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))

        # Display map
        st.write("Map of Uploaded Points:")
        m.to_streamlit()

        # Store the map in session_state
        st.session_state.map_data = m

        # Process each point for index calculation
        for idx, row in locations_df.iterrows():
            latitude = row['latitude']
            longitude = row['longitude']
            location_name = row.get('name', f"Point_{idx}")

            # Skip processing if latitude or longitude is NaN
            if pd.isna(latitude) or pd.isna(longitude):
                continue  # Skip this row and move to the next one

            # Define the region of interest (ROI)
            roi = ee.Geometry.Point([longitude, latitude])
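            # (ee.Geometry.Point takes coordinates as [longitude, latitude], i.e. x before y.)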

            # Load the selected Sentinel image collection
            collection = ee.ImageCollection(sub_options[sub_selection]) \
                .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
                .filterBounds(roi)

            # Check if the collection has images for the selected date range
            image_count = collection.size().getInfo()
            if image_count == 0:
                st.warning(f"No images found for {location_name}.")
            else:
                st.write(f"Found {image_count} images for {location_name}.")
                image = collection.first()
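                # Only the first image in the filtered collection is used; a composite
                # over the whole date range (e.g. collection.median()) would be an
                # alternative if averaging across acquisitions is wanted.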

                # Perform the calculation based on user selection
                result = None
                if index_choice.lower() == 'ndvi':
                    result = calculate_ndvi(image, roi)
                elif index_choice.lower() == 'ndwi':
                    result = calculate_ndwi(image, roi)
                elif index_choice.lower() == 'average no₂':
                    if 'NO2' in image.bandNames().getInfo():
                        result = calculate_avg_no2_sentinel5p(image, roi)
                    else:
                        st.warning(f"No NO2 band found for {location_name}. Please use Sentinel-5P for NO₂ data.")
                elif index_choice.lower() == 'custom formula' and custom_formula:
                    result = calculate_custom_formula(image, roi, custom_formula)

                if result is not None:
                    # Only store the numeric value (not the dictionary structure)
                    calculated_value = result.getInfo()  # Get the numeric value

                    # Store the result in session state
                    st.session_state.results.append({
                        'Location Name': location_name,
                        'Latitude': latitude,
                        'Longitude': longitude,
                        'Calculated Value': calculated_value
                    })

    # Check if polygons_df is populated for polygons
    if polygons_df is not None:
        # Display a preview of the polygons data
        st.write("Preview of the uploaded polygons data:")
        st.dataframe(polygons_df.head())

        # Create a LeafMap object to display the polygons
        m = leafmap.Map(center=[polygons_df.geometry.centroid.y.mean(), polygons_df.geometry.centroid.x.mean()], zoom=10)

        # Add polygons to the map
        for _, row in polygons_df.iterrows():
            polygon = row['geometry']
            if polygon.is_valid:  # Check if the geometry is valid
                # Create a GeoDataFrame with the single row
                gdf = gpd.GeoDataFrame([row], geometry=[polygon], crs=polygons_df.crs)
                
                # Add the valid GeoDataFrame to the map
                m.add_gdf(gdf=gdf, layer_name=row.get('name', 'Unnamed Polygon'))

        # Display map
        st.write("Map of Uploaded Polygons:")
        m.to_streamlit()

        # Store the map in session_state
        st.session_state.map_data = m

        # Process each polygon for index calculation
        for idx, row in polygons_df.iterrows():
            polygon = row['geometry']
            location_name = row.get('name', f"Polygon_{idx}")

            # Define the region of interest (ROI)
            try:
                roi = convert_to_ee_geometry(polygon)
            except ValueError as e:
                st.error(str(e))
                continue  # Skip this polygon if geometry is invalid

            # Load the selected Sentinel image collection
            collection = ee.ImageCollection(sub_options[sub_selection]) \
                .filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
                .filterBounds(roi)

            # Check if the collection has images for the selected date range
            image_count = collection.size().getInfo()
            if image_count == 0:
                st.warning(f"No images found for {location_name}.")
            else:
                st.write(f"Found {image_count} images for {location_name}.")
                image = collection.first()

                # Perform the calculation based on user selection
                result = None
                if index_choice.lower() == 'ndvi':
                    result = calculate_ndvi(image, roi)
                elif index_choice.lower() == 'ndwi':
                    result = calculate_ndwi(image, roi)
                elif index_choice.lower() == 'average no₂':
                    if 'NO2' in image.bandNames().getInfo():
                        result = calculate_avg_no2_sentinel5p(image, roi)
                    else:
                        st.warning(f"No NO2 band found for {location_name}. Please use Sentinel-5P for NO₂ data.")
                elif index_choice.lower() == 'custom formula' and custom_formula:
                    result = calculate_custom_formula(image, roi, custom_formula)

                if result is not None:
                    # Only store the numeric value (not the dictionary structure)
                    calculated_value = result.getInfo()  # Get the numeric value

                    # Store the result in session state
                    st.session_state.results.append({
                        'Location Name': location_name,
                        'Calculated Value': calculated_value
                    })

# After processing, show the results
if st.session_state.results:
    # Convert the results to a DataFrame for better visualization
    result_df = pd.DataFrame(st.session_state.results)

    # If the shape type is 'Point', include 'Latitude' and 'Longitude'
    if shape_type.lower() == 'point':
        # Show the results in a table format with Latitude and Longitude
        st.write("Processed Results Table (Points):")
        st.dataframe(result_df[['Location Name', 'Latitude', 'Longitude', 'Calculated Value']])
    else:
        # For polygons, we only show the Location Name and Calculated Value
        st.write("Processed Results Table (Polygons):")
        st.dataframe(result_df[['Location Name', 'Calculated Value']])

    # Generate a descriptive filename (avoid '/' in dates, which is invalid in filenames)
    filename = f"{main_selection}_{sub_selection}_{start_date.strftime('%Y-%m-%d')}_{end_date.strftime('%Y-%m-%d')}_{shape_type}.csv"

    # Offer the results as a CSV download
    st.download_button(
        label="Download results as CSV",
        data=result_df.to_csv(index=False).encode('utf-8'),
        file_name=filename,
        mime='text/csv'
    )
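
# To run this app locally (assuming the EE_Authentication secret and
# sentinel_datasets.json are available):
#     streamlit run <this_file>.py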