import os
import calendar
import logging
import tempfile
import time
from datetime import datetime
from collections import defaultdict

import gradio as gr
import pandas as pd
import numpy as np
import matplotlib
matplotlib.use('Agg')  # headless backend: no display is available on a server
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import cartopy.crs as ccrs
import plotly.graph_objects as go
import plotly.express as px
import requests
import statsmodels.api as sm
# -----------------------------
# Configuration and Setup
# -----------------------------
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# No CLI argument parsing: keep startup simple for hosted environments
DATA_PATH = '/tmp/typhoon_data' if 'SPACE_ID' in os.environ else tempfile.gettempdir()
# Ensure the directory exists and is writable
try:
    os.makedirs(DATA_PATH, exist_ok=True)
    # Test write permissions
    test_file = os.path.join(DATA_PATH, 'test_write.txt')
    with open(test_file, 'w') as f:
        f.write('test')
    os.remove(test_file)
    logging.info(f"Data directory is writable: {DATA_PATH}")
except Exception as e:
    logging.warning(f"Data directory not writable, using temp dir: {e}")
    DATA_PATH = tempfile.mkdtemp()
    logging.info(f"Using temporary directory: {DATA_PATH}")
# File paths
ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
MERGED_DATA_CSV = os.path.join(DATA_PATH, 'merged_typhoon_era5_data.csv')

# IBTrACS settings
BASIN_FILES = {
    'EP': 'ibtracs.EP.list.v04r01.csv',
    'NA': 'ibtracs.NA.list.v04r01.csv',
    'WP': 'ibtracs.WP.list.v04r01.csv'
}
IBTRACS_BASE_URL = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/'
LOCAL_IBTRACS_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
CACHE_FILE = os.path.join(DATA_PATH, 'ibtracs_cache.pkl')
CACHE_EXPIRY_DAYS = 1
# -----------------------------
# Color Maps and Standards
# -----------------------------
color_map = {
    'C5 Super Typhoon': 'rgb(255, 0, 0)',
    'C4 Very Strong Typhoon': 'rgb(255, 165, 0)',
    'C3 Strong Typhoon': 'rgb(255, 255, 0)',
    'C2 Typhoon': 'rgb(0, 255, 0)',
    'C1 Typhoon': 'rgb(0, 255, 255)',
    'Tropical Storm': 'rgb(0, 0, 255)',
    'Tropical Depression': 'rgb(128, 128, 128)'
}

atlantic_standard = {
    'C5 Super Typhoon': {'wind_speed': 137, 'color': 'Red', 'hex': '#FF0000'},
    'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'Orange', 'hex': '#FFA500'},
    'C3 Strong Typhoon': {'wind_speed': 96, 'color': 'Yellow', 'hex': '#FFFF00'},
    'C2 Typhoon': {'wind_speed': 83, 'color': 'Green', 'hex': '#00FF00'},
    'C1 Typhoon': {'wind_speed': 64, 'color': 'Cyan', 'hex': '#00FFFF'},
    'Tropical Storm': {'wind_speed': 34, 'color': 'Blue', 'hex': '#0000FF'},
    'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
}

taiwan_standard = {
    'Strong Typhoon': {'wind_speed': 51.0, 'color': 'Red', 'hex': '#FF0000'},
    'Medium Typhoon': {'wind_speed': 33.7, 'color': 'Orange', 'hex': '#FFA500'},
    'Mild Typhoon': {'wind_speed': 17.2, 'color': 'Yellow', 'hex': '#FFFF00'},
    'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
}
# -----------------------------
# Utility Functions for HF Spaces
# -----------------------------
def safe_file_write(file_path, data_frame, backup_dir=None):
    """Safely write a DataFrame to CSV with backup and error handling"""
    try:
        # Create the parent directory if it doesn't exist
        dir_name = os.path.dirname(file_path)
        if dir_name:
            os.makedirs(dir_name, exist_ok=True)
        # Write to a temporary file first, then atomically move it into place
        temp_path = file_path + '.tmp'
        data_frame.to_csv(temp_path, index=False)
        os.replace(temp_path, file_path)
        logging.info(f"Successfully saved {len(data_frame)} records to {file_path}")
        return True
    except PermissionError as e:
        logging.warning(f"Permission denied writing to {file_path}: {e}")
        if backup_dir:
            try:
                backup_path = os.path.join(backup_dir, os.path.basename(file_path))
                data_frame.to_csv(backup_path, index=False)
                logging.info(f"Saved to backup location: {backup_path}")
                return True
            except Exception as backup_e:
                logging.error(f"Failed to save to backup location: {backup_e}")
        return False
    except Exception as e:
        logging.error(f"Error saving file {file_path}: {e}")
        # Clean up the temp file if it exists
        temp_path = file_path + '.tmp'
        if os.path.exists(temp_path):
            try:
                os.remove(temp_path)
            except OSError:
                pass
        return False
def get_fallback_data_dir():
    """Get a fallback data directory that's guaranteed to be writable"""
    fallback_dirs = [
        tempfile.gettempdir(),
        '/tmp',
        os.path.expanduser('~'),
        os.getcwd()
    ]
    for directory in fallback_dirs:
        try:
            test_dir = os.path.join(directory, 'typhoon_fallback')
            os.makedirs(test_dir, exist_ok=True)
            test_file = os.path.join(test_dir, 'test.txt')
            with open(test_file, 'w') as f:
                f.write('test')
            os.remove(test_file)
            return test_dir
        except OSError:
            continue
    # If all else fails, use the current directory
    return os.getcwd()
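# Illustrative usage (hypothetical file name): write a frame to the data
# directory, falling back to a guaranteed-writable location if needed:
#   df = pd.DataFrame({'a': [1, 2]})
#   safe_file_write(os.path.join(DATA_PATH, 'example.csv'), df, get_fallback_data_dir())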
# -----------------------------
# ONI and Typhoon Data Functions
# -----------------------------
def download_oni_file(url, filename):
    """Download the ONI file with retry logic"""
    max_retries = 3
    for attempt in range(max_retries):
        try:
            response = requests.get(url, timeout=30)
            response.raise_for_status()
            with open(filename, 'wb') as f:
                f.write(response.content)
            return True
        except Exception as e:
            logging.warning(f"Attempt {attempt + 1} failed to download ONI: {e}")
            if attempt < max_retries - 1:
                time.sleep(2 ** attempt)  # Exponential backoff: 1 s, then 2 s
            else:
                logging.error(f"Failed to download ONI after {max_retries} attempts")
    return False
def convert_oni_ascii_to_csv(input_file, output_file):
    """Convert the ONI ASCII table to a Year-by-month CSV"""
    data = defaultdict(lambda: [''] * 12)
    season_to_month = {'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5,
                       'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11}
    try:
        with open(input_file, 'r') as f:
            lines = f.readlines()[1:]  # Skip the header row
        for line in lines:
            parts = line.split()
            if len(parts) >= 4:
                season, year, anom = parts[0], parts[1], parts[-1]
                if season in season_to_month:
                    month = season_to_month[season]
                    # DJF is labeled with the January year, but its December
                    # anomaly belongs to the previous calendar year
                    if season == 'DJF':
                        year = str(int(year) - 1)
                    data[year][month - 1] = anom
        df = pd.DataFrame(data).T.reset_index()
        df.columns = ['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                      'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        df = df.sort_values('Year').reset_index(drop=True)
        return safe_file_write(output_file, df, get_fallback_data_dir())
    except Exception as e:
        logging.error(f"Error converting ONI file: {e}")
        return False
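# The ONI ASCII table is whitespace-delimited; its rows look roughly like
# (columns: season, year, total SST, anomaly):
#   SEAS  YR    TOTAL  ANOM
#   DJF   1950  24.72  -1.53
#   JFM   1950  25.17  -1.34
# Only the season, year, and trailing anomaly column are used above.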
def update_oni_data():
    """Update ONI data with error handling"""
    url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
    temp_file = os.path.join(DATA_PATH, "temp_oni.ascii.txt")
    input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
    output_file = ONI_DATA_PATH
    try:
        if download_oni_file(url, temp_file):
            # Replace any stale copy with the fresh download and reconvert
            os.replace(temp_file, input_file)
            convert_oni_ascii_to_csv(input_file, output_file)
        else:
            # Create fallback ONI data if the download fails
            logging.warning("Creating fallback ONI data")
            create_fallback_oni_data(output_file)
    except Exception as e:
        logging.error(f"Error updating ONI data: {e}")
        create_fallback_oni_data(output_file)
def create_fallback_oni_data(output_file):
    """Create minimal ONI data for testing"""
    years = range(2000, 2025)
    months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    # Create synthetic ONI data
    data = []
    for year in years:
        row = [year]
        for month in months:
            # Generate plausible ONI-like anomalies around zero
            value = np.random.normal(0, 1) * 0.5
            row.append(f"{value:.2f}")
        data.append(row)
    df = pd.DataFrame(data, columns=['Year'] + months)
    safe_file_write(output_file, df, get_fallback_data_dir())
# -----------------------------
# IBTrACS Data Loading
# -----------------------------
def download_ibtracs_file(basin, force_download=False):
    """Download a specific basin file from IBTrACS"""
    filename = BASIN_FILES[basin]
    local_path = os.path.join(DATA_PATH, filename)
    url = IBTRACS_BASE_URL + filename
    # Reuse a cached copy if it is less than 7 days old
    if os.path.exists(local_path) and not force_download:
        file_age = time.time() - os.path.getmtime(local_path)
        if file_age < 7 * 24 * 3600:
            logging.info(f"Using cached {basin} basin file")
            return local_path
    try:
        logging.info(f"Downloading {basin} basin file from {url}")
        response = requests.get(url, timeout=60)
        response.raise_for_status()
        # Ensure the directory exists
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        with open(local_path, 'wb') as f:
            f.write(response.content)
        logging.info(f"Successfully downloaded {basin} basin file")
        return local_path
    except Exception as e:
        logging.error(f"Failed to download {basin} basin file: {e}")
        return None
def examine_ibtracs_structure(file_path):
    """Examine the structure of an IBTrACS CSV file and return its columns"""
    try:
        # Log the first few lines without reading the whole file into memory
        with open(file_path, 'r') as f:
            head = [next(f, '') for _ in range(5)]
        logging.info("First 5 lines of IBTrACS file:")
        for i, line in enumerate(head):
            logging.info(f"Line {i}: {line.strip()}")
        # The first line contains the actual column headers
        df = pd.read_csv(file_path, nrows=5)
        logging.info(f"Columns from first row: {list(df.columns)}")
        return list(df.columns)
    except Exception as e:
        logging.error(f"Error examining IBTrACS structure: {e}")
        return None
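# For reference, IBTrACS v04 CSV headers typically include identifier and
# per-agency fields such as SID, SEASON, BASIN, NAME, ISO_TIME, LAT, LON,
# WMO_WIND, WMO_PRES, USA_WIND, and USA_PRES (the exact set varies by basin
# file); only a handful of these are required downstream.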
def load_ibtracs_csv_directly(basin='WP'):
    """Load IBTrACS data directly from the basin CSV"""
    filename = BASIN_FILES[basin]
    local_path = os.path.join(DATA_PATH, filename)
    # Download if the file does not exist locally
    if not os.path.exists(local_path):
        downloaded_path = download_ibtracs_file(basin)
        if not downloaded_path:
            return None
    try:
        # First, examine the structure
        actual_columns = examine_ibtracs_structure(local_path)
        if not actual_columns:
            logging.error("Could not examine IBTrACS file structure")
            return None
        # Read the CSV without skipping rows: the first row holds the column
        # headers, and any non-data (e.g. units) row is removed below by the
        # coerce-and-drop cleaning
        logging.info(f"Reading IBTrACS CSV file: {local_path}")
        df = pd.read_csv(local_path, low_memory=False)
        logging.info(f"Original columns: {list(df.columns)}")
        logging.info(f"Data shape before cleaning: {df.shape}")
        # LAT and LON are required by everything downstream
        if not all(col in df.columns for col in ['LAT', 'LON']):
            logging.error(f"Missing critical columns. Available: {list(df.columns)}")
            return None
        # Clean and standardize the data with an explicit time format
        if 'ISO_TIME' in df.columns:
            df['ISO_TIME'] = pd.to_datetime(df['ISO_TIME'], format='%Y-%m-%d %H:%M:%S', errors='coerce')
        # Coerce numeric columns; bad values become NaN
        numeric_columns = ['LAT', 'LON', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES']
        for col in numeric_columns:
            if col in df.columns:
                df[col] = pd.to_numeric(df[col], errors='coerce')
        # Filter out rows missing critical coordinates
        df = df[df['LAT'].notna() & df['LON'].notna()]
        # Keep LAT/LON within valid ranges
        df = df[(df['LAT'] >= -90) & (df['LAT'] <= 90)]
        df = df[(df['LON'] >= -180) & (df['LON'] <= 180)]
        # Add basin info if missing
        if 'BASIN' not in df.columns:
            df['BASIN'] = basin
        # Add default columns if missing
        if 'NAME' not in df.columns:
            df['NAME'] = 'UNNAMED'
        if 'SEASON' not in df.columns and 'ISO_TIME' in df.columns:
            df['SEASON'] = df['ISO_TIME'].dt.year
        logging.info(f"Successfully loaded {len(df)} records from {basin} basin")
        return df
    except Exception as e:
        logging.error(f"Error reading IBTrACS CSV file: {e}")
        return None
def load_ibtracs_data_fixed():
    """Load IBTrACS data for all configured basins"""
    ibtracs_data = {}
    # Load each basin, prioritizing WP for this application
    load_order = ['WP', 'EP', 'NA']
    for basin in load_order:
        try:
            logging.info(f"Loading {basin} basin data...")
            df = load_ibtracs_csv_directly(basin)
            if df is not None and not df.empty:
                ibtracs_data[basin] = df
                logging.info(f"Successfully loaded {basin} basin with {len(df)} records")
            else:
                logging.warning(f"No data loaded for basin {basin}")
                ibtracs_data[basin] = None
        except Exception as e:
            logging.error(f"Failed to load basin {basin}: {e}")
            ibtracs_data[basin] = None
    return ibtracs_data
def load_data_fixed(oni_path, typhoon_path):
    """Load ONI and typhoon data, rebuilding from IBTrACS when needed"""
    # Load ONI data
    oni_data = pd.DataFrame({'Year': [], 'Jan': [], 'Feb': [], 'Mar': [], 'Apr': [],
                             'May': [], 'Jun': [], 'Jul': [], 'Aug': [], 'Sep': [],
                             'Oct': [], 'Nov': [], 'Dec': []})
    if not os.path.exists(oni_path):
        logging.warning(f"ONI data file not found: {oni_path}")
        update_oni_data()
    try:
        oni_data = pd.read_csv(oni_path)
        logging.info(f"Successfully loaded ONI data with {len(oni_data)} years")
    except Exception as e:
        logging.error(f"Error loading ONI data: {e}")
        update_oni_data()
        try:
            oni_data = pd.read_csv(oni_path)
        except Exception as e:
            logging.error(f"Still can't load ONI data: {e}")

    # Load typhoon data: prefer the processed file, rebuild from IBTrACS otherwise
    typhoon_data = None
    if os.path.exists(typhoon_path):
        try:
            typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
            # Ensure basic columns exist and are valid
            required_cols = ['LAT', 'LON']
            if all(col in typhoon_data.columns for col in required_cols):
                if 'ISO_TIME' in typhoon_data.columns:
                    typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
                logging.info(f"Loaded processed typhoon data with {len(typhoon_data)} records")
            else:
                logging.warning("Processed typhoon data missing required columns, will reload from IBTrACS")
                typhoon_data = None
        except Exception as e:
            logging.error(f"Error loading processed typhoon data: {e}")
            typhoon_data = None

    # If no valid processed data, load from IBTrACS
    if typhoon_data is None or typhoon_data.empty:
        logging.info("Loading typhoon data from IBTrACS...")
        ibtracs_data = load_ibtracs_data_fixed()
        # Combine all available basin data, prioritizing WP
        combined_dfs = []
        for basin in ['WP', 'EP', 'NA']:
            if basin in ibtracs_data and ibtracs_data[basin] is not None:
                df = ibtracs_data[basin].copy()
                df['BASIN'] = basin
                combined_dfs.append(df)
        if combined_dfs:
            typhoon_data = pd.concat(combined_dfs, ignore_index=True)
            # Synthesize a SID from basin and season if it is missing
            if 'SID' not in typhoon_data.columns and 'BASIN' in typhoon_data.columns:
                if 'SEASON' in typhoon_data.columns:
                    typhoon_data['SID'] = (typhoon_data['BASIN'].astype(str) +
                                           typhoon_data.index.astype(str).str.zfill(2) +
                                           typhoon_data['SEASON'].astype(str))
                else:
                    typhoon_data['SID'] = (typhoon_data['BASIN'].astype(str) +
                                           typhoon_data.index.astype(str).str.zfill(2) +
                                           '2000')
            # Save the processed data for future use
            safe_file_write(typhoon_path, typhoon_data, get_fallback_data_dir())
            logging.info(f"Combined IBTrACS data: {len(typhoon_data)} total records")
        else:
            logging.error("Failed to load any IBTrACS basin data")
            # Create minimal fallback data
            typhoon_data = create_fallback_typhoon_data()

    # Final validation of typhoon data
    if typhoon_data is not None:
        # Ensure required columns exist, filling with fallback values
        required_columns = {
            'SID': 'UNKNOWN',
            'ISO_TIME': pd.Timestamp('2000-01-01'),
            'LAT': 0.0,
            'LON': 0.0,
            'USA_WIND': np.nan,
            'USA_PRES': np.nan,
            'NAME': 'UNNAMED',
            'SEASON': 2000
        }
        for col, default_val in required_columns.items():
            if col not in typhoon_data.columns:
                typhoon_data[col] = default_val
                logging.warning(f"Added missing column {col} with default value")
        # Normalize data types
        if 'ISO_TIME' in typhoon_data.columns:
            typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
        typhoon_data['LAT'] = pd.to_numeric(typhoon_data['LAT'], errors='coerce')
        typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
        typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
        typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
        # Remove rows with invalid coordinates
        typhoon_data = typhoon_data.dropna(subset=['LAT', 'LON'])
        logging.info(f"Final typhoon data: {len(typhoon_data)} records after validation")
    return oni_data, typhoon_data
def create_fallback_typhoon_data():
    """Create minimal synthetic typhoon data so the app can still start"""
    dates = pd.date_range(start='2000-01-01', end='2023-12-31', freq='D')
    storm_dates = dates[np.random.choice(len(dates), size=100, replace=False)]
    data = []
    for i, date in enumerate(storm_dates):
        # Create plausible WP storm tracks
        base_lat = np.random.uniform(10, 30)
        base_lon = np.random.uniform(130, 160)
        # Generate 20-50 track points per storm at 6-hour intervals
        track_length = np.random.randint(20, 51)
        sid = f"WP{i+1:02d}{date.year}"
        for j in range(track_length):
            lat = base_lat + j * 0.2 + np.random.normal(0, 0.1)
            lon = base_lon + j * 0.3 + np.random.normal(0, 0.1)
            wind = max(25, 70 + np.random.normal(0, 20))
            pres = max(950, 1000 - wind + np.random.normal(0, 5))
            data.append({
                'SID': sid,
                'ISO_TIME': date + pd.Timedelta(hours=j * 6),
                'NAME': f'FALLBACK_{i+1}',
                'SEASON': date.year,
                'LAT': lat,
                'LON': lon,
                'USA_WIND': wind,
                'USA_PRES': pres,
                'BASIN': 'WP'
            })
    df = pd.DataFrame(data)
    logging.info(f"Created fallback typhoon data with {len(df)} records")
    return df
def process_oni_data(oni_data):
    """Melt ONI data into long format with a Date column"""
    oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
    month_map = {'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06',
                 'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'}
    oni_long['Month'] = oni_long['Month'].map(month_map)
    oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str) + '-' + oni_long['Month'] + '-01')
    oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
    return oni_long
def process_typhoon_data(typhoon_data):
    """Reduce track-level data to one row per storm (peak wind, minimum pressure)"""
    if 'ISO_TIME' in typhoon_data.columns:
        typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
    typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
    typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
    typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
    logging.info(f"Unique basins in typhoon_data: {typhoon_data['SID'].str[:2].unique()}")
    typhoon_max = typhoon_data.groupby('SID').agg({
        'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first',
        'LAT': 'first', 'LON': 'first'
    }).reset_index()
    if 'ISO_TIME' in typhoon_max.columns:
        typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
        typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
    else:
        # Fallback if there is no ISO_TIME
        typhoon_max['Month'] = '01'
        typhoon_max['Year'] = typhoon_max['SEASON']
    typhoon_max['Category'] = typhoon_max['USA_WIND'].apply(categorize_typhoon)
    return typhoon_max
def merge_data(oni_long, typhoon_max):
    """Merge per-storm typhoon data with monthly ONI values"""
    return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])
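# Note: pd.merge defaults to an inner join, so storms whose first-fix
# Year/Month has no matching ONI entry are dropped from merged_data rather
# than carried through with NaN.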
def categorize_typhoon(wind_speed):
    """Categorize a typhoon by maximum sustained wind (knots)"""
    if pd.isna(wind_speed):
        return 'Tropical Depression'
    if wind_speed >= 137:
        return 'C5 Super Typhoon'
    elif wind_speed >= 113:
        return 'C4 Very Strong Typhoon'
    elif wind_speed >= 96:
        return 'C3 Strong Typhoon'
    elif wind_speed >= 83:
        return 'C2 Typhoon'
    elif wind_speed >= 64:
        return 'C1 Typhoon'
    elif wind_speed >= 34:
        return 'Tropical Storm'
    else:
        return 'Tropical Depression'
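# Examples (Saffir-Simpson-style knot thresholds):
#   categorize_typhoon(150) -> 'C5 Super Typhoon'
#   categorize_typhoon(70)  -> 'C1 Typhoon'
#   categorize_typhoon(40)  -> 'Tropical Storm'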
def classify_enso_phases(oni_value):
    """Classify the ENSO phase from an ONI value"""
    if isinstance(oni_value, pd.Series):
        oni_value = oni_value.iloc[0]
    if pd.isna(oni_value):
        return 'Neutral'
    if oni_value >= 0.5:
        return 'El Nino'
    elif oni_value <= -0.5:
        return 'La Nina'
    else:
        return 'Neutral'
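# Examples: classify_enso_phases(0.7) -> 'El Nino';
# classify_enso_phases(-0.8) -> 'La Nina'; classify_enso_phases(0.2) ->
# 'Neutral' (±0.5 is the conventional ONI cutoff for ENSO events).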
# -----------------------------
# Regression Functions
# -----------------------------
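# Small helper for the date windows below: the end date is the true last day
# of the end month (calendar.monthrange), so storms on the 29th-31st are not
# silently excluded by a hard-coded day 28.
def month_end(year, month):
    """Return a datetime for the last day of the given month."""
    return datetime(int(year), int(month), calendar.monthrange(int(year), int(month))[1])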
def perform_wind_regression(start_year, start_month, end_year, end_month):
    """Logistic regression of severe-typhoon occurrence (>=64 kt) on ONI"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['USA_WIND', 'ONI']).copy()
    data['severe_typhoon'] = (data['USA_WIND'] >= 64).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['severe_typhoon']
    try:
        model = sm.Logit(y, X).fit(disp=0)
        beta_1 = model.params['ONI']
        exp_beta_1 = np.exp(beta_1)
        p_value = model.pvalues['ONI']
        return f"Wind Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
    except Exception as e:
        return f"Wind Regression Error: {e}"
def perform_pressure_regression(start_year, start_month, end_year, end_month):
    """Logistic regression of intense-typhoon occurrence (<=950 hPa) on ONI"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['USA_PRES', 'ONI']).copy()
    data['intense_typhoon'] = (data['USA_PRES'] <= 950).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['intense_typhoon']
    try:
        model = sm.Logit(y, X).fit(disp=0)
        beta_1 = model.params['ONI']
        exp_beta_1 = np.exp(beta_1)
        p_value = model.pvalues['ONI']
        return f"Pressure Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
    except Exception as e:
        return f"Pressure Regression Error: {e}"
def perform_longitude_regression(start_year, start_month, end_year, end_month):
    """Logistic regression of western-genesis typhoons (LON <= 140°E) on ONI"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['LON', 'ONI']).copy()
    data['western_typhoon'] = (data['LON'] <= 140).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['western_typhoon']
    try:
        # The outcome is binary, so fit a Logit like the other regressions;
        # exponentiating an OLS slope would not yield a valid odds ratio
        model = sm.Logit(y, X).fit(disp=0)
        beta_1 = model.params['ONI']
        exp_beta_1 = np.exp(beta_1)
        p_value = model.pvalues['ONI']
        return f"Longitude Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
    except Exception as e:
        return f"Longitude Regression Error: {e}"
# -----------------------------
# Visualization Functions
# -----------------------------
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    """Plot full typhoon tracks for the selected window and ENSO phase"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        # .title() matches 'El Nino'/'La Nina' exactly; .capitalize() would
        # lowercase the second word and match nothing
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.title()]
    unique_storms = filtered_data['SID'].unique()
    count = len(unique_storms)
    fig = go.Figure()
    for sid in unique_storms:
        storm_data = typhoon_data[typhoon_data['SID'] == sid]
        if storm_data.empty:
            continue
        name = storm_data['NAME'].iloc[0] if pd.notnull(storm_data['NAME'].iloc[0]) else "Unnamed"
        basin = storm_data['SID'].iloc[0][:2]
        storm_oni = filtered_data[filtered_data['SID'] == sid]['ONI'].iloc[0]
        color = 'red' if storm_oni >= 0.5 else ('blue' if storm_oni <= -0.5 else 'green')
        fig.add_trace(go.Scattergeo(
            lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
            name=f"{name} ({basin})",
            line=dict(width=1.5, color=color), hoverinfo="name"
        ))
    if typhoon_search:
        search_mask = typhoon_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if search_mask.any():
            for sid in typhoon_data[search_mask]['SID'].unique():
                storm_data = typhoon_data[typhoon_data['SID'] == sid]
                fig.add_trace(go.Scattergeo(
                    lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines+markers',
                    name=f"MATCHED: {storm_data['NAME'].iloc[0]}",
                    line=dict(width=3, color='yellow'),
                    marker=dict(size=5), hoverinfo="name"
                ))
    fig.update_layout(
        title=f"Typhoon Tracks ({start_year}-{start_month} to {end_year}-{end_month})",
        geo=dict(
            projection_type='natural earth',
            showland=True,
            showcoastlines=True,
            landcolor='rgb(243,243,243)',
            countrycolor='rgb(204,204,204)',
            coastlinecolor='rgb(204,204,204)',
            center=dict(lon=140, lat=20),
            projection_scale=3
        ),
        legend_title="Typhoons by ENSO Phase",
        showlegend=True,
        height=700
    )
    fig.add_annotation(
        x=0.02, y=0.98, xref="paper", yref="paper",
        text="Red: El Nino, Blue: La Nina, Green: Neutral",
        showarrow=False, align="left",
        bgcolor="rgba(255,255,255,0.8)"
    )
    return fig, f"Total typhoons displayed: {count}"
def get_wind_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    """Scatter of maximum wind speed vs ONI, with optional storm highlighting"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.title()]
    fig = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
                     hover_data=['NAME', 'Year', 'Category'],
                     title='Wind Speed vs ONI',
                     labels={'ONI': 'ONI Value', 'USA_WIND': 'Max Wind Speed (knots)'},
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
                x=filtered_data.loc[mask, 'ONI'], y=filtered_data.loc[mask, 'USA_WIND'],
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
                text=filtered_data.loc[mask, 'NAME'] + ' (' + filtered_data.loc[mask, 'Year'].astype(str) + ')'
            ))
    regression = perform_wind_regression(start_year, start_month, end_year, end_month)
    return fig, regression
def get_pressure_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    """Scatter of minimum pressure vs ONI, with optional storm highlighting"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.title()]
    fig = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
                     hover_data=['NAME', 'Year', 'Category'],
                     title='Pressure vs ONI',
                     labels={'ONI': 'ONI Value', 'USA_PRES': 'Min Pressure (hPa)'},
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
                x=filtered_data.loc[mask, 'ONI'], y=filtered_data.loc[mask, 'USA_PRES'],
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
                text=filtered_data.loc[mask, 'NAME'] + ' (' + filtered_data.loc[mask, 'Year'].astype(str) + ')'
            ))
    regression = perform_pressure_regression(start_year, start_month, end_year, end_month)
    return fig, regression
def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    """Scatter of genesis longitude vs ONI with a fitted regression line"""
    start_date = datetime(int(start_year), int(start_month), 1)
    end_date = month_end(end_year, end_month)
    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.title()]
    fig = px.scatter(filtered_data, x='LON', y='ONI', hover_data=['NAME'],
                     title='Typhoon Generation Longitude vs ONI (All Years)')
    if len(filtered_data) > 1:
        X = np.array(filtered_data['LON']).reshape(-1, 1)
        y = filtered_data['ONI']
        try:
            model = sm.OLS(y, sm.add_constant(X)).fit()
            y_pred = model.predict(sm.add_constant(X))
            fig.add_trace(go.Scatter(x=filtered_data['LON'], y=y_pred, mode='lines', name='Regression Line'))
            slope = model.params[1]
            slopes_text = f"All Years Slope: {slope:.4f}"
        except Exception as e:
            slopes_text = f"Regression Error: {e}"
    else:
        slopes_text = "Insufficient data for regression"
    regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
    return fig, slopes_text, regression
def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
    """Categorize a typhoon under the Atlantic or Taiwan standard"""
    if pd.isna(wind_speed):
        return 'Tropical Depression', '#808080'
    if standard == 'taiwan':
        # Taiwan thresholds are in m/s; convert from knots (1 kt = 0.514444 m/s)
        wind_speed_ms = wind_speed * 0.514444
        if wind_speed_ms >= 51.0:
            return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['hex']
        elif wind_speed_ms >= 33.7:
            return 'Medium Typhoon', taiwan_standard['Medium Typhoon']['hex']
        elif wind_speed_ms >= 17.2:
            return 'Mild Typhoon', taiwan_standard['Mild Typhoon']['hex']
        return 'Tropical Depression', taiwan_standard['Tropical Depression']['hex']
    else:
        if wind_speed >= 137:
            return 'C5 Super Typhoon', atlantic_standard['C5 Super Typhoon']['hex']
        elif wind_speed >= 113:
            return 'C4 Very Strong Typhoon', atlantic_standard['C4 Very Strong Typhoon']['hex']
        elif wind_speed >= 96:
            return 'C3 Strong Typhoon', atlantic_standard['C3 Strong Typhoon']['hex']
        elif wind_speed >= 83:
            return 'C2 Typhoon', atlantic_standard['C2 Typhoon']['hex']
        elif wind_speed >= 64:
            return 'C1 Typhoon', atlantic_standard['C1 Typhoon']['hex']
        elif wind_speed >= 34:
            return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
        return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
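# Example: the same 85 kt storm classifies differently under each standard:
#   categorize_typhoon_by_standard(85, 'atlantic') -> ('C2 Typhoon', '#00FF00')
#   categorize_typhoon_by_standard(85, 'taiwan')   -> ('Medium Typhoon', '#FFA500')
# (85 kt ≈ 43.7 m/s, which falls in the 33.7-51.0 m/s band.)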
# -----------------------------
# Animation Functions
# -----------------------------
def generate_track_video_from_csv(year, storm_id, standard):
    """Render an MP4 track animation for one storm (requires ffmpeg on PATH)"""
    storm_df = typhoon_data[typhoon_data['SID'] == storm_id].copy()
    if storm_df.empty:
        logging.error(f"No data found for storm: {storm_id}")
        return None
    storm_df = storm_df.sort_values('ISO_TIME')
    lats = storm_df['LAT'].astype(float).values
    lons = storm_df['LON'].astype(float).values
    times = pd.to_datetime(storm_df['ISO_TIME']).values
    if 'USA_WIND' in storm_df.columns:
        winds = pd.to_numeric(storm_df['USA_WIND'], errors='coerce').values
    else:
        winds = np.full(len(lats), np.nan)
    storm_name = storm_df['NAME'].iloc[0] if pd.notnull(storm_df['NAME'].iloc[0]) else "Unnamed"
    basin = storm_df['SID'].iloc[0][:2]
    season = storm_df['SEASON'].iloc[0] if 'SEASON' in storm_df.columns else year

    # Pad the map extent around the track
    min_lat, max_lat = np.min(lats), np.max(lats)
    min_lon, max_lon = np.min(lons), np.max(lons)
    lat_padding = max((max_lat - min_lat) * 0.3, 5)
    lon_padding = max((max_lon - min_lon) * 0.3, 5)

    fig = plt.figure(figsize=(12, 6), dpi=100)
    ax = plt.axes([0.05, 0.05, 0.60, 0.85],
                  projection=ccrs.PlateCarree(central_longitude=180))
    ax.stock_img()
    ax.set_extent([min_lon - lon_padding, max_lon + lon_padding,
                   min_lat - lat_padding, max_lat + lat_padding],
                  crs=ccrs.PlateCarree())
    ax.coastlines(resolution='50m', color='black', linewidth=1)
    gl = ax.gridlines(draw_labels=True, color='gray', alpha=0.4, linestyle='--')
    gl.top_labels = gl.right_labels = False
    ax.set_title(f"{year} {storm_name} ({basin}) - {season}", fontsize=14)

    line, = ax.plot([], [], transform=ccrs.PlateCarree(), color='blue', linewidth=2)
    point, = ax.plot([], [], 'o', markersize=8, transform=ccrs.PlateCarree())
    date_text = ax.text(0.02, 0.02, '', transform=ax.transAxes, fontsize=10,
                        bbox=dict(facecolor='white', alpha=0.8))
    storm_info_text = fig.text(0.70, 0.60, '', fontsize=10,
                               bbox=dict(facecolor='white', alpha=0.8, boxstyle='round,pad=0.5'))

    from matplotlib.lines import Line2D
    standard_dict = atlantic_standard if standard == 'atlantic' else taiwan_standard
    legend_elements = [Line2D([0], [0], marker='o', color='w', label=cat,
                              markerfacecolor=details['hex'], markersize=8)
                       for cat, details in standard_dict.items()]
    ax.legend(handles=legend_elements, title="Storm Categories",
              loc='upper right', fontsize=9)

    def init():
        line.set_data([], [])
        point.set_data([], [])
        date_text.set_text('')
        storm_info_text.set_text('')
        return line, point, date_text, storm_info_text

    def update(frame):
        # Draw the track up to this frame and color the head marker by category
        line.set_data(lons[:frame + 1], lats[:frame + 1])
        point.set_data([lons[frame]], [lats[frame]])
        wind_speed = winds[frame] if frame < len(winds) and not pd.isna(winds[frame]) else 0
        category, color = categorize_typhoon_by_standard(wind_speed, standard)
        point.set_color(color)
        dt_str = pd.to_datetime(times[frame]).strftime('%Y-%m-%d %H:%M')
        date_text.set_text(dt_str)
        info_str = (f"Name: {storm_name}\nBasin: {basin}\nDate: {dt_str}\n"
                    f"Wind: {wind_speed:.1f} kt\nCategory: {category}")
        storm_info_text.set_text(info_str)
        return line, point, date_text, storm_info_text

    ani = animation.FuncAnimation(fig, update, init_func=init, frames=len(times),
                                  interval=200, blit=True, repeat=True)

    # Save the animation; FFMpegWriter needs the ffmpeg binary to be installed
    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.mp4', dir=DATA_PATH)
    temp_file.close()  # only the path is needed; let the writer own the file
    try:
        writer = animation.FFMpegWriter(fps=5, bitrate=1800)
        ani.save(temp_file.name, writer=writer)
        plt.close(fig)
        return temp_file.name
    except Exception as e:
        logging.error(f"Error creating animation: {e}")
        plt.close(fig)
        return None
def simplified_track_video(year, basin, typhoon, standard):
    """Resolve the dropdown selection to a storm ID and render its animation"""
    if not typhoon:
        return None
    # Dropdown entries look like "NAME (SID)"; recover the SID
    storm_id = typhoon.split('(')[-1].strip(')')
    return generate_track_video_from_csv(year, storm_id, standard)
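# Example (hypothetical SID): a selection of "HAIYAN (2013306N07162)" yields
# storm_id == "2013306N07162", which is then matched against
# typhoon_data['SID'].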
# -----------------------------
# Update Typhoon Options Function
# -----------------------------
def update_typhoon_options_fixed(year, basin):
    """Populate the cyclone dropdown for the selected year and basin"""
    try:
        # Use the typhoon_data already loaded
        if typhoon_data is None or typhoon_data.empty:
            logging.error("No typhoon data available")
            return gr.update(choices=[], value=None)
        # Filter by year
        if 'ISO_TIME' in typhoon_data.columns:
            year_data = typhoon_data[typhoon_data['ISO_TIME'].dt.year == int(year)].copy()
        elif 'SEASON' in typhoon_data.columns:
            year_data = typhoon_data[typhoon_data['SEASON'] == int(year)].copy()
        else:
            # Fallback: use all data
            year_data = typhoon_data.copy()
        if basin != "All Basins":
            # Extract the basin code and filter
            basin_code = basin.split(' - ')[0] if ' - ' in basin else basin[:2]
            if 'SID' in year_data.columns:
                year_data = year_data[year_data['SID'].str.startswith(basin_code, na=False)]
            elif 'BASIN' in year_data.columns:
                year_data = year_data[year_data['BASIN'] == basin_code]
        if year_data.empty:
            logging.warning(f"No storms found for year {year} and basin {basin}")
            return gr.update(choices=[], value=None)
        # Build "NAME (SID)" options, one per unique storm
        storms = year_data.groupby('SID').first().reset_index()
        options = []
        for _, storm in storms.iterrows():
            name = storm.get('NAME', 'UNNAMED')
            if pd.isna(name) or name == '':
                name = 'UNNAMED'
            options.append(f"{name} ({storm['SID']})")
        if not options:
            return gr.update(choices=[], value=None)
        options = sorted(options)
        return gr.update(choices=options, value=options[0])
    except Exception as e:
        logging.error(f"Error in update_typhoon_options_fixed: {e}")
        return gr.update(choices=[], value=None)
# -----------------------------
# Load & Process Data
# -----------------------------
# Global data containers
oni_data = None
typhoon_data = None
merged_data = None

def initialize_data():
    """Initialize all data safely, falling back to synthetic data on failure"""
    global oni_data, typhoon_data, merged_data

    def use_fallback_data():
        global oni_data, typhoon_data, merged_data
        oni_data = pd.DataFrame({'Year': [2000], 'Jan': [0], 'Feb': [0], 'Mar': [0], 'Apr': [0],
                                 'May': [0], 'Jun': [0], 'Jul': [0], 'Aug': [0], 'Sep': [0],
                                 'Oct': [0], 'Nov': [0], 'Dec': [0]})
        typhoon_data = create_fallback_typhoon_data()
        merged_data = merge_data(process_oni_data(oni_data), process_typhoon_data(typhoon_data))

    try:
        logging.info("Starting data loading process...")
        update_oni_data()
        oni_data, typhoon_data = load_data_fixed(ONI_DATA_PATH, TYPHOON_DATA_PATH)
        if oni_data is not None and typhoon_data is not None:
            oni_long = process_oni_data(oni_data)
            typhoon_max = process_typhoon_data(typhoon_data)
            merged_data = merge_data(oni_long, typhoon_max)
            logging.info("Data loading complete.")
        else:
            logging.error("Failed to load required data")
            use_fallback_data()
    except Exception as e:
        logging.error(f"Error during data initialization: {e}")
        use_fallback_data()

# Initialize data at import time so the interface can report counts
initialize_data()
# -----------------------------
# Simplified Gradio Interface
# -----------------------------
def create_interface():
    """Create the Gradio interface with error handling"""
    try:
        with gr.Blocks() as demo:
            gr.Markdown("# Typhoon Analysis Dashboard")

            with gr.Tab("Overview"):
                gr.Markdown(f"""
                ## Welcome to the Typhoon Analysis Dashboard
                This dashboard allows you to analyze typhoon data in relation to ENSO phases.

                ### Features:
                - **Track Visualization**: View typhoon tracks by time period and ENSO phase.
                - **Wind Analysis**: Examine wind speed vs ONI relationships.
                - **Pressure Analysis**: Analyze pressure vs ONI relationships.
                - **Longitude Analysis**: Study typhoon generation longitude vs ONI.
                - **Path Animation**: View animated storm tracks on a world map.

                ### Data Status:
                - **ONI Data**: {len(oni_data)} years loaded
                - **Typhoon Data**: {len(typhoon_data)} records loaded
                - **Merged Data**: {len(merged_data)} typhoons with ONI values
                """)

            with gr.Tab("Track Visualization"):
                with gr.Row():
                    # precision=0 makes gr.Number return ints (datetime() rejects floats)
                    start_year = gr.Number(label="Start Year", value=2000, precision=0)
                    start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    end_year = gr.Number(label="End Year", value=2024, precision=0)
                    end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    typhoon_search = gr.Textbox(label="Typhoon Search")
                analyze_btn = gr.Button("Generate Tracks")
                tracks_plot = gr.Plot()
                typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
                analyze_btn.click(
                    fn=get_full_tracks,
                    inputs=[start_year, start_month, end_year, end_month, enso_phase, typhoon_search],
                    outputs=[tracks_plot, typhoon_count]
                )

            with gr.Tab("Wind Analysis"):
                with gr.Row():
                    wind_start_year = gr.Number(label="Start Year", value=2000, precision=0)
                    wind_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    wind_end_year = gr.Number(label="End Year", value=2024, precision=0)
                    wind_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    wind_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    wind_typhoon_search = gr.Textbox(label="Typhoon Search")
                wind_analyze_btn = gr.Button("Generate Wind Analysis")
                wind_scatter = gr.Plot()
                wind_regression_results = gr.Textbox(label="Wind Regression Results")
                wind_analyze_btn.click(
                    fn=get_wind_analysis,
                    inputs=[wind_start_year, wind_start_month, wind_end_year, wind_end_month, wind_enso_phase, wind_typhoon_search],
                    outputs=[wind_scatter, wind_regression_results]
                )

            with gr.Tab("Pressure Analysis"):
                with gr.Row():
                    pressure_start_year = gr.Number(label="Start Year", value=2000, precision=0)
                    pressure_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    pressure_end_year = gr.Number(label="End Year", value=2024, precision=0)
                    pressure_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    pressure_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    pressure_typhoon_search = gr.Textbox(label="Typhoon Search")
                pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
                pressure_scatter = gr.Plot()
                pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
                pressure_analyze_btn.click(
                    fn=get_pressure_analysis,
                    inputs=[pressure_start_year, pressure_start_month, pressure_end_year, pressure_end_month, pressure_enso_phase, pressure_typhoon_search],
                    outputs=[pressure_scatter, pressure_regression_results]
                )

            with gr.Tab("Longitude Analysis"):
                with gr.Row():
                    lon_start_year = gr.Number(label="Start Year", value=2000, precision=0)
                    lon_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    lon_end_year = gr.Number(label="End Year", value=2024, precision=0)
                    lon_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    lon_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    lon_typhoon_search = gr.Textbox(label="Typhoon Search (Optional)")
                lon_analyze_btn = gr.Button("Generate Longitude Analysis")
                regression_plot = gr.Plot()
                slopes_text = gr.Textbox(label="Regression Slopes")
                lon_regression_results = gr.Textbox(label="Longitude Regression Results")
                lon_analyze_btn.click(
                    fn=get_longitude_analysis,
                    inputs=[lon_start_year, lon_start_month, lon_end_year, lon_end_month, lon_enso_phase, lon_typhoon_search],
                    outputs=[regression_plot, slopes_text, lon_regression_results]
                )

            with gr.Tab("Tropical Cyclone Path Animation"):
                with gr.Row():
                    year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950, 2025)], value="2000")
                    basin_constant = gr.Textbox(value="All Basins", visible=False)
                with gr.Row():
                    typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
                    standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic', 'taiwan'], value='atlantic')
                animate_btn = gr.Button("Generate Animation")
                path_video = gr.Video()
                animation_info = gr.Markdown("""
                ### Animation Instructions
                1. Select a year.
                2. Choose a tropical cyclone from the populated list.
                3. Select a classification standard (Atlantic or Taiwan).
                4. Click "Generate Animation".
                5. The animation displays the storm track on a world map with dynamic sidebar information.
                """)
                # Refresh the cyclone list whenever the year changes
                year_dropdown.change(
                    fn=update_typhoon_options_fixed,
                    inputs=[year_dropdown, basin_constant],
                    outputs=typhoon_dropdown
                )
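                # Also populate the cyclone list once on startup, so the
                # dropdown is not empty before the year is first changed
                # (assumes gr.Blocks.load accepts fn/inputs/outputs wiring)
                demo.load(
                    fn=update_typhoon_options_fixed,
                    inputs=[year_dropdown, basin_constant],
                    outputs=typhoon_dropdown
                )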
                animate_btn.click(
                    fn=simplified_track_video,
                    inputs=[year_dropdown, basin_constant, typhoon_dropdown, standard_dropdown],
                    outputs=path_video
                )
        return demo
    except Exception as e:
        logging.error(f"Error creating Gradio interface: {e}")
        # Create a minimal fallback interface
        with gr.Blocks() as demo:
            gr.Markdown("# Typhoon Analysis Dashboard")
            gr.Markdown("**Error**: Could not load full interface. Please check logs.")
        return demo
# Create and launch the interface
demo = create_interface()

if __name__ == "__main__":
    demo.launch()