# GEE_Calculator / app.py
import streamlit as st
import json
import ee
import os
import pandas as pd
import geopandas as gpd
from datetime import datetime
import leafmap.foliumap as leafmap
import time
import re
# Set up the page layout
st.set_page_config(layout="wide")
# Custom button styling
st.markdown(
"""
<style>
div.stButton > button:first-child {
background-color: #006400;
color:#ffffff;
}
</style>""",
unsafe_allow_html=True,
)
# Logo
st.write(
f"""
<div style="display: flex; justify-content: space-between; align-items: center;">
<img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/ISRO_Logo.png" style="width: 20%; margin-right: auto;">
<img src="https://huggingface.co/spaces/YashMK89/GEE_Calculator/resolve/main/SAC_Logo.png" style="width: 20%; margin-left: auto;">
</div>
""",
unsafe_allow_html=True,
)
# Authenticate and initialize Earth Engine using the credentials stored in the EE_Authentication secret
earthengine_credentials = os.environ.get("EE_Authentication")
# Write the credentials where the Earth Engine client library expects to find them
os.makedirs(os.path.expanduser("~/.config/earthengine/"), exist_ok=True)
with open(os.path.expanduser("~/.config/earthengine/credentials"), "w") as f:
f.write(earthengine_credentials)
ee.Initialize(project='ee-yashsacisro24')
# Load Sentinel dataset options from JSON file
with open("sentinel_datasets.json") as f:
data = json.load(f)
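# Expected structure (assumed from how the file is used below):
# { "<category>": { "sub_options": { "<display name>": "<Earth Engine collection ID>", ... } }, ... }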
# Display the title and dataset selection
st.title("Sentinel Dataset")
# Select dataset category and subcategory
main_selection = st.selectbox("Select Sentinel Dataset Category", list(data.keys()))
if main_selection:
sub_options = data[main_selection]["sub_options"]
sub_selection = st.selectbox("Select Specific Dataset ID", list(sub_options.keys()))
# Earth Engine Index Calculator Section
st.header("Earth Engine Index Calculator")
# Choose Index or Custom Formula
index_choice = st.selectbox("Select an Index or Enter Custom Formula", ['NDVI', 'NDWI', 'Average NO₂', 'Custom Formula'])
# Initialize custom_formula variable
custom_formula = ""
# Display corresponding formula based on the index selected
if index_choice == 'NDVI':
st.write("Formula for NDVI: NDVI = (B8 - B4) / (B8 + B4)")
elif index_choice == 'NDWI':
st.write("Formula for NDWI: NDWI = (B3 - B8) / (B3 + B8)")
elif index_choice == 'Average NO₂':
st.write("Formula for Average NO₂: Average NO₂ = Mean(NO2 band)")
elif index_choice == 'Custom Formula':
    custom_formula = st.text_input("Enter Custom Formula (e.g., '(B5 - B4) / (B5 + B4)')")
st.write(f"Custom Formula: {custom_formula}") # Display the custom formula after the user inputs it
# Function to read points from CSV
def read_csv(file_path):
df = pd.read_csv(file_path)
return df
# Function to read points from GeoJSON
def read_geojson(file_path):
gdf = gpd.read_file(file_path)
return gdf
# Function to read points from KML
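# Note: reading KML via GeoPandas relies on the Fiona/GDAL build providing a KML driver; on some builds it
# must be enabled explicitly (e.g. gpd.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw').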
def read_kml(file_path):
gdf = gpd.read_file(file_path, driver='KML')
return gdf
# Ask user whether they want to process 'Point' or 'Polygon' data
shape_type = st.selectbox("Do you want to process 'Point' or 'Polygon' data?", ["Point", "Polygon"])
# Ask user to upload a file based on shape type
file_upload = st.file_uploader(f"Upload your {shape_type} data (CSV, GeoJSON, KML)", type=["csv", "geojson", "kml"])
# Date Input for Start and End Dates
start_date = st.date_input("Start Date", value=pd.to_datetime('2020-01-01'))
end_date = st.date_input("End Date", value=pd.to_datetime('2020-12-31'))
# Convert start_date and end_date to string format for Earth Engine
start_date_str = start_date.strftime('%Y-%m-%d')
end_date_str = end_date.strftime('%Y-%m-%d')
# Initialize session state for storing results if not already done
if 'results' not in st.session_state:
st.session_state.results = []
if 'last_params' not in st.session_state:
st.session_state.last_params = {}
if 'map_data' not in st.session_state:
st.session_state.map_data = None # Initialize map_data
# Function to check if parameters have changed
def parameters_changed():
return (
st.session_state.last_params.get('main_selection') != main_selection or
st.session_state.last_params.get('sub_selection') != sub_selection or
st.session_state.last_params.get('index_choice') != index_choice or
st.session_state.last_params.get('start_date_str') != start_date_str or
st.session_state.last_params.get('end_date_str') != end_date_str
)
# If parameters have changed, reset the results
if parameters_changed():
st.session_state.results = [] # Clear the previous results
# Update the last parameters to the current ones
st.session_state.last_params = {
'main_selection': main_selection,
'sub_selection': sub_selection,
'index_choice': index_choice,
'start_date_str': start_date_str,
'end_date_str': end_date_str
}
# Functions to perform index calculations
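# Note: Sentinel-2 B3/B4/B8 are 10 m bands; scale=30 in the reducers below computes regional means at 30 m.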
def calculate_ndvi(image, geometry):
ndvi = image.normalizedDifference(['B8', 'B4']).rename('NDVI')
result = ndvi.reduceRegion(
reducer=ee.Reducer.mean(),
geometry=geometry,
scale=30
)
return result.get('NDVI')
def calculate_ndwi(image, geometry):
ndwi = image.normalizedDifference(['B3', 'B8']).rename('NDWI')
result = ndwi.reduceRegion(
reducer=ee.Reducer.mean(),
geometry=geometry,
scale=30
)
return result.get('NDWI')
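# Average NO2 from a Sentinel-5P image. Assumes the selected dataset exposes a band literally named 'NO2';
# official S5P L3 products call it 'NO2_column_number_density', so adjust the band name if needed.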
def calculate_avg_no2_sentinel5p(image, geometry):
no2 = image.select('NO2').reduceRegion(
reducer=ee.Reducer.mean(),
geometry=geometry,
scale=1000
).get('NO2')
return no2
def calculate_custom_formula(image, geometry, formula):
    # Map each band of the image to its name so the formula can reference bands directly (e.g. 'B8', 'B4');
    # ee.Image.expression() resolves variable names in the formula from this map.
    band_names = image.bandNames().getInfo()
    band_map = {name: image.select(name) for name in band_names}
    result = image.expression(formula, band_map).rename('Custom Index').reduceRegion(
        reducer=ee.Reducer.mean(),
        geometry=geometry,
        scale=30
    )
    return result.get('Custom Index')
# Function to sanitize the description for export
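# Example: "My Site #1" -> "My_Site__1" (spaces and other disallowed characters become underscores)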
def sanitize_description(description):
# Replace spaces and other invalid characters with underscores
sanitized = re.sub(r'[^a-zA-Z0-9._-]', '_', description)
# Ensure the description is no longer than 100 characters
sanitized = sanitized[:100]
return sanitized
# Process the uploaded file (points or polygons)
if file_upload:
locations_df = None # Initialize locations_df to None
polygons_df = None # Initialize polygons_df to None
file_extension = os.path.splitext(file_upload.name)[1].lower()
# Read file based on shape type
if shape_type == 'Point':
if file_extension == '.csv':
locations_df = read_csv(file_upload)
elif file_extension == '.geojson':
locations_df = read_geojson(file_upload)
elif file_extension == '.kml':
locations_df = read_kml(file_upload)
else:
st.error("Unsupported file type. Please upload a CSV, GeoJSON, or KML file for points.")
elif shape_type == 'Polygon':
if file_extension == '.geojson':
polygons_df = read_geojson(file_upload)
elif file_extension == '.kml':
polygons_df = read_kml(file_upload)
else:
st.error("Unsupported file type. Please upload a GeoJSON or KML file for polygons.")
if locations_df is not None:
# Display a preview of the points data
st.write("Preview of the uploaded points data:")
st.dataframe(locations_df.head())
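        # The map and calculations below assume 'latitude' and 'longitude' columns (as in a CSV upload);
        # GeoJSON/KML point uploads would need these columns derived from the geometry.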
# Create a LeafMap object to display the points
m = leafmap.Map(center=[locations_df['latitude'].mean(), locations_df['longitude'].mean()], zoom=10)
# Add points to the map using a loop
for _, row in locations_df.iterrows():
latitude = row['latitude']
longitude = row['longitude']
# Check if latitude or longitude are NaN and skip if they are
if pd.isna(latitude) or pd.isna(longitude):
continue # Skip this row and move to the next one
m.add_marker(location=[latitude, longitude], popup=row.get('name', 'No Name'))
# Display map
st.write("Map of Uploaded Points:")
m.to_streamlit()
# Store the map in session_state
st.session_state.map_data = m
# Process each point for index calculation
for idx, row in locations_df.iterrows():
latitude = row['latitude']
longitude = row['longitude']
location_name = row.get('name', f"Location_{idx}")
# Skip processing if latitude or longitude is NaN
if pd.isna(latitude) or pd.isna(longitude):
continue # Skip this row and move to the next one
# Define the region of interest (ROI)
roi = ee.Geometry.Point([longitude, latitude])
            # Load the image collection selected above
collection = ee.ImageCollection(sub_options[sub_selection]) \
.filterDate(ee.Date(start_date_str), ee.Date(end_date_str)) \
.filterBounds(roi)
# Check if the collection has images for the selected date range
image_count = collection.size().getInfo()
if image_count == 0:
st.warning(f"No images found for {location_name}.")
else:
st.write(f"Found {image_count} images for {location_name}.")
image = collection.first()
# Perform the calculation based on user selection
result = None
if index_choice == 'NDVI':
result = calculate_ndvi(image, roi)
elif index_choice == 'NDWI':
result = calculate_ndwi(image, roi)
elif index_choice == 'Average NO₂':
if 'NO2' in image.bandNames().getInfo():
result = calculate_avg_no2_sentinel5p(image, roi)
else:
st.warning(f"No NO2 band found for {location_name}. Please use Sentinel-5P for NO₂ data.")
elif index_choice == 'Custom Formula' and custom_formula:
result = calculate_custom_formula(image, roi, custom_formula)
if result is not None:
# Only store the numeric value (not the dictionary structure)
calculated_value = result.getInfo() # Get the numeric value
# Store the result in session state
st.session_state.results.append({
'Location Name': location_name,
'Latitude': latitude,
'Longitude': longitude,
'Calculated Value': calculated_value
})
# Sanitize the location name for use in the export task
sanitized_location_name = sanitize_description(location_name)
# Now allow user to download the image
st.write(f"Click below to download the image for {sanitized_location_name}:")
# Export image as GeoTIFF
export_image = image.clip(roi) # Clip the image to the region of interest (ROI)
export_task = ee.batch.Export.image.toDrive(
image=export_image,
description=sanitized_location_name,
folder='Sentinel_Images',
fileNamePrefix=f'{sanitized_location_name}_image',
region=roi,
scale=30,
fileFormat='GeoTIFF',
crs='EPSG:4326'
)
export_task.start()
# Show export status
st.info(f"Exporting image for {sanitized_location_name}. This might take some time...")
# Poll for the task status (to check if export is finished)
while export_task.active():
time.sleep(5)
st.write("Export in progress...")
st.write(f"Export complete! The image is ready for download.")
                    # Provide a direct download link for the clipped image.
                    # (st.download_button would serve the URL string itself as the file content, so show the URL as a link instead.)
                    download_url = export_image.getDownloadURL()
                    st.markdown(f"[Download the GeoTIFF image for {sanitized_location_name}]({download_url})")
# After processing, show the results
if st.session_state.results:
# Convert the results to a DataFrame for better visualization
result_df = pd.DataFrame(st.session_state.results)
# Show the results in a table format
st.write("Processed Results Table:")
st.dataframe(result_df[['Location Name', 'Latitude', 'Longitude', 'Calculated Value']])
# Allow downloading of the results as CSV
st.download_button(
label="Download results as CSV",
data=result_df.to_csv(index=False).encode('utf-8'),
file_name="calculated_results.csv",
mime='text/csv'
)