Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -45,19 +45,8 @@ logging.basicConfig(
|
|
45 |
format='%(asctime)s - %(levelname)s - %(message)s'
|
46 |
)
|
47 |
|
48 |
-
parser
|
49 |
-
|
50 |
-
args = parser.parse_args()
|
51 |
-
|
52 |
-
# Enhanced data path handling for HuggingFace Spaces
|
53 |
-
if 'SPACE_ID' in os.environ:
|
54 |
-
# Running on HuggingFace Spaces
|
55 |
-
DATA_PATH = '/tmp/typhoon_data'
|
56 |
-
os.makedirs(DATA_PATH, exist_ok=True)
|
57 |
-
logging.info(f"Running on HuggingFace Spaces, using data path: {DATA_PATH}")
|
58 |
-
else:
|
59 |
-
# Local development
|
60 |
-
DATA_PATH = os.environ.get('DATA_PATH', tempfile.gettempdir())
|
61 |
|
62 |
# Ensure directory exists and is writable
|
63 |
try:
|
@@ -117,23 +106,6 @@ taiwan_standard = {
|
|
117 |
'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
|
118 |
}
|
119 |
|
120 |
-
# -----------------------------
|
121 |
-
# Season and Regions
|
122 |
-
# -----------------------------
|
123 |
-
season_months = {
|
124 |
-
'all': list(range(1, 13)),
|
125 |
-
'summer': [6, 7, 8],
|
126 |
-
'winter': [12, 1, 2]
|
127 |
-
}
|
128 |
-
regions = {
|
129 |
-
"Taiwan Land": {"lat_min": 21.8, "lat_max": 25.3, "lon_min": 119.5, "lon_max": 122.1},
|
130 |
-
"Taiwan Sea": {"lat_min": 19, "lat_max": 28, "lon_min": 117, "lon_max": 125},
|
131 |
-
"Japan": {"lat_min": 20, "lat_max": 45, "lon_min": 120, "lon_max": 150},
|
132 |
-
"China": {"lat_min": 18, "lat_max": 53, "lon_min": 73, "lon_max": 135},
|
133 |
-
"Hong Kong": {"lat_min": 21.5, "lat_max": 23, "lon_min": 113, "lon_max": 115},
|
134 |
-
"Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
|
135 |
-
}
|
136 |
-
|
137 |
# -----------------------------
|
138 |
# Utility Functions for HF Spaces
|
139 |
# -----------------------------
|
@@ -168,6 +140,7 @@ def safe_file_write(file_path, data_frame, backup_dir=None):
|
|
168 |
except Exception as e:
|
169 |
logging.error(f"Error saving file {file_path}: {e}")
|
170 |
# Clean up temp file if it exists
|
|
|
171 |
if os.path.exists(temp_path):
|
172 |
try:
|
173 |
os.remove(temp_path)
|
@@ -379,9 +352,9 @@ def load_ibtracs_csv_directly(basin='WP'):
|
|
379 |
logging.error(f"Missing critical columns. Available: {list(df.columns)}")
|
380 |
return None
|
381 |
|
382 |
-
# Clean and standardize the data
|
383 |
if 'ISO_TIME' in df.columns:
|
384 |
-
df['ISO_TIME'] = pd.to_datetime(df['ISO_TIME'], errors='coerce')
|
385 |
|
386 |
# Clean numeric columns
|
387 |
numeric_columns = ['LAT', 'LON', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES']
|
@@ -720,101 +693,6 @@ def perform_longitude_regression(start_year, start_month, end_year, end_month):
|
|
720 |
# Visualization Functions
|
721 |
# -----------------------------
|
722 |
|
723 |
-
def generate_typhoon_tracks(filtered_data, typhoon_search):
|
724 |
-
"""Generate typhoon tracks visualization"""
|
725 |
-
fig = go.Figure()
|
726 |
-
for sid in filtered_data['SID'].unique():
|
727 |
-
storm_data = filtered_data[filtered_data['SID'] == sid]
|
728 |
-
phase = storm_data['ENSO_Phase'].iloc[0]
|
729 |
-
color = {'El Nino':'red','La Nina':'blue','Neutral':'green'}.get(phase, 'black')
|
730 |
-
fig.add_trace(go.Scattergeo(
|
731 |
-
lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
|
732 |
-
name=storm_data['NAME'].iloc[0], line=dict(width=2, color=color)
|
733 |
-
))
|
734 |
-
if typhoon_search:
|
735 |
-
mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
|
736 |
-
if mask.any():
|
737 |
-
storm_data = filtered_data[mask]
|
738 |
-
fig.add_trace(go.Scattergeo(
|
739 |
-
lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
|
740 |
-
name=f'Matched: {typhoon_search}', line=dict(width=5, color='yellow')
|
741 |
-
))
|
742 |
-
fig.update_layout(
|
743 |
-
title='Typhoon Tracks',
|
744 |
-
geo=dict(projection_type='natural earth', showland=True),
|
745 |
-
height=700
|
746 |
-
)
|
747 |
-
return fig
|
748 |
-
|
749 |
-
def generate_wind_oni_scatter(filtered_data, typhoon_search):
|
750 |
-
"""Generate wind vs ONI scatter plot"""
|
751 |
-
fig = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
|
752 |
-
hover_data=['NAME','Year','Category'],
|
753 |
-
title='Wind Speed vs ONI',
|
754 |
-
labels={'ONI':'ONI Value','USA_WIND':'Max Wind Speed (knots)'},
|
755 |
-
color_discrete_map=color_map)
|
756 |
-
if typhoon_search:
|
757 |
-
mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
|
758 |
-
if mask.any():
|
759 |
-
fig.add_trace(go.Scatter(
|
760 |
-
x=filtered_data.loc[mask,'ONI'], y=filtered_data.loc[mask,'USA_WIND'],
|
761 |
-
mode='markers', marker=dict(size=10, color='red', symbol='star'),
|
762 |
-
name=f'Matched: {typhoon_search}',
|
763 |
-
text=filtered_data.loc[mask,'NAME']+' ('+filtered_data.loc[mask,'Year'].astype(str)+')'
|
764 |
-
))
|
765 |
-
return fig
|
766 |
-
|
767 |
-
def generate_pressure_oni_scatter(filtered_data, typhoon_search):
|
768 |
-
"""Generate pressure vs ONI scatter plot"""
|
769 |
-
fig = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
|
770 |
-
hover_data=['NAME','Year','Category'],
|
771 |
-
title='Pressure vs ONI',
|
772 |
-
labels={'ONI':'ONI Value','USA_PRES':'Min Pressure (hPa)'},
|
773 |
-
color_discrete_map=color_map)
|
774 |
-
if typhoon_search:
|
775 |
-
mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
|
776 |
-
if mask.any():
|
777 |
-
fig.add_trace(go.Scatter(
|
778 |
-
x=filtered_data.loc[mask,'ONI'], y=filtered_data.loc[mask,'USA_PRES'],
|
779 |
-
mode='markers', marker=dict(size=10, color='red', symbol='star'),
|
780 |
-
name=f'Matched: {typhoon_search}',
|
781 |
-
text=filtered_data.loc[mask,'NAME']+' ('+filtered_data.loc[mask,'Year'].astype(str)+')'
|
782 |
-
))
|
783 |
-
return fig
|
784 |
-
|
785 |
-
def generate_regression_analysis(filtered_data):
|
786 |
-
"""Generate regression analysis plot"""
|
787 |
-
fig = px.scatter(filtered_data, x='LON', y='ONI', hover_data=['NAME'],
|
788 |
-
title='Typhoon Generation Longitude vs ONI (All Years)')
|
789 |
-
if len(filtered_data) > 1:
|
790 |
-
X = np.array(filtered_data['LON']).reshape(-1,1)
|
791 |
-
y = filtered_data['ONI']
|
792 |
-
try:
|
793 |
-
model = sm.OLS(y, sm.add_constant(X)).fit()
|
794 |
-
y_pred = model.predict(sm.add_constant(X))
|
795 |
-
fig.add_trace(go.Scatter(x=filtered_data['LON'], y=y_pred, mode='lines', name='Regression Line'))
|
796 |
-
slope = model.params[1]
|
797 |
-
slopes_text = f"All Years Slope: {slope:.4f}"
|
798 |
-
except Exception as e:
|
799 |
-
slopes_text = f"Regression Error: {e}"
|
800 |
-
else:
|
801 |
-
slopes_text = "Insufficient data for regression"
|
802 |
-
return fig, slopes_text
|
803 |
-
|
804 |
-
def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
805 |
-
"""Generate main analysis plots"""
|
806 |
-
start_date = datetime(start_year, start_month, 1)
|
807 |
-
end_date = datetime(end_year, end_month, 28)
|
808 |
-
filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
|
809 |
-
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
810 |
-
if enso_phase != 'all':
|
811 |
-
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
812 |
-
tracks_fig = generate_typhoon_tracks(filtered_data, typhoon_search)
|
813 |
-
wind_scatter = generate_wind_oni_scatter(filtered_data, typhoon_search)
|
814 |
-
pressure_scatter = generate_pressure_oni_scatter(filtered_data, typhoon_search)
|
815 |
-
regression_fig, slopes_text = generate_regression_analysis(filtered_data)
|
816 |
-
return tracks_fig, wind_scatter, pressure_scatter, regression_fig, slopes_text
|
817 |
-
|
818 |
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
819 |
"""Get full typhoon tracks"""
|
820 |
start_date = datetime(start_year, start_month, 1)
|
@@ -876,21 +754,88 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
|
|
876 |
|
877 |
def get_wind_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
878 |
"""Get wind analysis"""
|
879 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
880 |
regression = perform_wind_regression(start_year, start_month, end_year, end_month)
|
881 |
-
return
|
882 |
|
883 |
def get_pressure_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
884 |
"""Get pressure analysis"""
|
885 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
886 |
regression = perform_pressure_regression(start_year, start_month, end_year, end_month)
|
887 |
-
return
|
888 |
|
889 |
def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
890 |
"""Get longitude analysis"""
|
891 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
892 |
regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
|
893 |
-
return
|
894 |
|
895 |
def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
|
896 |
"""Categorize typhoon by standard"""
|
@@ -921,246 +866,6 @@ def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
|
|
921 |
return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
|
922 |
return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
|
923 |
|
924 |
-
# -----------------------------
|
925 |
-
# TSNE Cluster Function
|
926 |
-
# -----------------------------
|
927 |
-
|
928 |
-
def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
|
929 |
-
"""Updated TSNE cluster function with mean curves"""
|
930 |
-
try:
|
931 |
-
# Merge raw typhoon data with ONI
|
932 |
-
raw_data = typhoon_data.copy()
|
933 |
-
if 'ISO_TIME' not in raw_data.columns:
|
934 |
-
logging.error("ISO_TIME column not found in typhoon data")
|
935 |
-
return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "Error: ISO_TIME column missing"
|
936 |
-
|
937 |
-
raw_data['Year'] = raw_data['ISO_TIME'].dt.year
|
938 |
-
raw_data['Month'] = raw_data['ISO_TIME'].dt.strftime('%m')
|
939 |
-
merged_raw = pd.merge(raw_data, process_oni_data(oni_data), on=['Year','Month'], how='left')
|
940 |
-
|
941 |
-
# Filter by date
|
942 |
-
start_date = datetime(start_year, start_month, 1)
|
943 |
-
end_date = datetime(end_year, end_month, 28)
|
944 |
-
merged_raw = merged_raw[(merged_raw['ISO_TIME'] >= start_date) & (merged_raw['ISO_TIME'] <= end_date)]
|
945 |
-
logging.info(f"Total points after date filtering: {merged_raw.shape[0]}")
|
946 |
-
|
947 |
-
# Filter by ENSO phase if specified
|
948 |
-
merged_raw['ENSO_Phase'] = merged_raw['ONI'].apply(classify_enso_phases)
|
949 |
-
if enso_value != 'all':
|
950 |
-
merged_raw = merged_raw[merged_raw['ENSO_Phase'] == enso_value.capitalize()]
|
951 |
-
logging.info(f"Total points after ENSO filtering: {merged_raw.shape[0]}")
|
952 |
-
|
953 |
-
# Regional filtering for Western Pacific
|
954 |
-
wp_data = merged_raw[(merged_raw['LON'] >= 100) & (merged_raw['LON'] <= 180) &
|
955 |
-
(merged_raw['LAT'] >= 0) & (merged_raw['LAT'] <= 40)]
|
956 |
-
logging.info(f"Total points after WP regional filtering: {wp_data.shape[0]}")
|
957 |
-
if wp_data.empty:
|
958 |
-
logging.info("WP regional filter returned no data; using all filtered data.")
|
959 |
-
wp_data = merged_raw
|
960 |
-
|
961 |
-
# Group by storm ID
|
962 |
-
all_storms_data = []
|
963 |
-
for sid, group in wp_data.groupby('SID'):
|
964 |
-
group = group.sort_values('ISO_TIME')
|
965 |
-
times = pd.to_datetime(group['ISO_TIME']).values
|
966 |
-
lats = group['LAT'].astype(float).values
|
967 |
-
lons = group['LON'].astype(float).values
|
968 |
-
if len(lons) < 2:
|
969 |
-
continue
|
970 |
-
# Extract wind and pressure curves
|
971 |
-
wind = group['USA_WIND'].astype(float).values if 'USA_WIND' in group.columns else None
|
972 |
-
pres = group['USA_PRES'].astype(float).values if 'USA_PRES' in group.columns else None
|
973 |
-
all_storms_data.append((sid, lons, lats, times, wind, pres))
|
974 |
-
|
975 |
-
logging.info(f"Storms available for TSNE after grouping: {len(all_storms_data)}")
|
976 |
-
if not all_storms_data:
|
977 |
-
return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms for clustering."
|
978 |
-
|
979 |
-
# Interpolate each storm's route to a common length
|
980 |
-
max_length = min(50, max(len(item[1]) for item in all_storms_data)) # Cap at 50 points
|
981 |
-
route_vectors = []
|
982 |
-
wind_curves = []
|
983 |
-
pres_curves = []
|
984 |
-
storm_ids = []
|
985 |
-
|
986 |
-
for sid, lons, lats, times, wind, pres in all_storms_data:
|
987 |
-
t = np.linspace(0, 1, len(lons))
|
988 |
-
t_new = np.linspace(0, 1, max_length)
|
989 |
-
try:
|
990 |
-
lon_interp = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
|
991 |
-
lat_interp = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
|
992 |
-
except Exception as ex:
|
993 |
-
logging.error(f"Interpolation error for storm {sid}: {ex}")
|
994 |
-
continue
|
995 |
-
|
996 |
-
route_vector = np.column_stack((lon_interp, lat_interp)).flatten()
|
997 |
-
if np.isnan(route_vector).any():
|
998 |
-
continue
|
999 |
-
|
1000 |
-
route_vectors.append(route_vector)
|
1001 |
-
storm_ids.append(sid)
|
1002 |
-
|
1003 |
-
# Interpolate wind and pressure
|
1004 |
-
if wind is not None and len(wind) >= 2:
|
1005 |
-
try:
|
1006 |
-
wind_interp = interp1d(t, wind, kind='linear', fill_value='extrapolate')(t_new)
|
1007 |
-
except Exception as ex:
|
1008 |
-
logging.error(f"Wind interpolation error for storm {sid}: {ex}")
|
1009 |
-
wind_interp = np.full(max_length, np.nan)
|
1010 |
-
else:
|
1011 |
-
wind_interp = np.full(max_length, np.nan)
|
1012 |
-
|
1013 |
-
if pres is not None and len(pres) >= 2:
|
1014 |
-
try:
|
1015 |
-
pres_interp = interp1d(t, pres, kind='linear', fill_value='extrapolate')(t_new)
|
1016 |
-
except Exception as ex:
|
1017 |
-
logging.error(f"Pressure interpolation error for storm {sid}: {ex}")
|
1018 |
-
pres_interp = np.full(max_length, np.nan)
|
1019 |
-
else:
|
1020 |
-
pres_interp = np.full(max_length, np.nan)
|
1021 |
-
|
1022 |
-
wind_curves.append(wind_interp)
|
1023 |
-
pres_curves.append(pres_interp)
|
1024 |
-
|
1025 |
-
logging.info(f"Storms with valid route vectors: {len(route_vectors)}")
|
1026 |
-
if len(route_vectors) == 0:
|
1027 |
-
return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
|
1028 |
-
|
1029 |
-
route_vectors = np.array(route_vectors)
|
1030 |
-
wind_curves = np.array(wind_curves)
|
1031 |
-
pres_curves = np.array(pres_curves)
|
1032 |
-
|
1033 |
-
# Run TSNE on route vectors
|
1034 |
-
if len(route_vectors) < 5:
|
1035 |
-
return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "Need at least 5 storms for clustering."
|
1036 |
-
|
1037 |
-
tsne = TSNE(n_components=2, random_state=42, verbose=1, perplexity=min(30, len(route_vectors)-1))
|
1038 |
-
tsne_results = tsne.fit_transform(route_vectors)
|
1039 |
-
|
1040 |
-
# Dynamic DBSCAN
|
1041 |
-
selected_labels = None
|
1042 |
-
selected_eps = None
|
1043 |
-
for eps in np.linspace(1.0, 10.0, 91):
|
1044 |
-
dbscan = DBSCAN(eps=eps, min_samples=max(2, len(route_vectors)//10))
|
1045 |
-
labels = dbscan.fit_predict(tsne_results)
|
1046 |
-
clusters = set(labels) - {-1}
|
1047 |
-
if 2 <= len(clusters) <= min(10, len(route_vectors)//2):
|
1048 |
-
selected_labels = labels
|
1049 |
-
selected_eps = eps
|
1050 |
-
break
|
1051 |
-
|
1052 |
-
if selected_labels is None:
|
1053 |
-
selected_eps = 5.0
|
1054 |
-
dbscan = DBSCAN(eps=selected_eps, min_samples=max(2, len(route_vectors)//10))
|
1055 |
-
selected_labels = dbscan.fit_predict(tsne_results)
|
1056 |
-
|
1057 |
-
logging.info(f"Selected DBSCAN eps: {selected_eps:.2f} yielding {len(set(selected_labels)-{-1})} clusters.")
|
1058 |
-
|
1059 |
-
# TSNE scatter plot
|
1060 |
-
fig_tsne = go.Figure()
|
1061 |
-
colors = px.colors.qualitative.Set3
|
1062 |
-
unique_labels = sorted(set(selected_labels) - {-1})
|
1063 |
-
|
1064 |
-
for i, label in enumerate(unique_labels):
|
1065 |
-
indices = np.where(selected_labels == label)[0]
|
1066 |
-
fig_tsne.add_trace(go.Scatter(
|
1067 |
-
x=tsne_results[indices, 0],
|
1068 |
-
y=tsne_results[indices, 1],
|
1069 |
-
mode='markers',
|
1070 |
-
marker=dict(color=colors[i % len(colors)]),
|
1071 |
-
name=f"Cluster {label}"
|
1072 |
-
))
|
1073 |
-
|
1074 |
-
noise_indices = np.where(selected_labels == -1)[0]
|
1075 |
-
if len(noise_indices) > 0:
|
1076 |
-
fig_tsne.add_trace(go.Scatter(
|
1077 |
-
x=tsne_results[noise_indices, 0],
|
1078 |
-
y=tsne_results[noise_indices, 1],
|
1079 |
-
mode='markers',
|
1080 |
-
marker=dict(color='grey'),
|
1081 |
-
name='Noise'
|
1082 |
-
))
|
1083 |
-
|
1084 |
-
fig_tsne.update_layout(
|
1085 |
-
title="t-SNE of Storm Routes",
|
1086 |
-
xaxis_title="t-SNE Dim 1",
|
1087 |
-
yaxis_title="t-SNE Dim 2"
|
1088 |
-
)
|
1089 |
-
|
1090 |
-
# Compute mean routes and curves for each cluster
|
1091 |
-
fig_routes = go.Figure()
|
1092 |
-
cluster_stats = []
|
1093 |
-
|
1094 |
-
for i, label in enumerate(unique_labels):
|
1095 |
-
indices = np.where(selected_labels == label)[0]
|
1096 |
-
cluster_ids = [storm_ids[j] for j in indices]
|
1097 |
-
cluster_vectors = route_vectors[indices, :]
|
1098 |
-
mean_vector = np.mean(cluster_vectors, axis=0)
|
1099 |
-
mean_route = mean_vector.reshape((max_length, 2))
|
1100 |
-
mean_lon = mean_route[:, 0]
|
1101 |
-
mean_lat = mean_route[:, 1]
|
1102 |
-
|
1103 |
-
fig_routes.add_trace(go.Scattergeo(
|
1104 |
-
lon=mean_lon,
|
1105 |
-
lat=mean_lat,
|
1106 |
-
mode='lines',
|
1107 |
-
line=dict(width=4, color=colors[i % len(colors)]),
|
1108 |
-
name=f"Cluster {label} Mean Route"
|
1109 |
-
))
|
1110 |
-
|
1111 |
-
# Compute mean curves
|
1112 |
-
cluster_winds = wind_curves[indices, :]
|
1113 |
-
cluster_pres = pres_curves[indices, :]
|
1114 |
-
mean_wind_curve = np.nanmean(cluster_winds, axis=0)
|
1115 |
-
mean_pres_curve = np.nanmean(cluster_pres, axis=0)
|
1116 |
-
cluster_stats.append((label, mean_wind_curve, mean_pres_curve))
|
1117 |
-
|
1118 |
-
fig_routes.update_layout(
|
1119 |
-
title="Cluster Mean Routes",
|
1120 |
-
geo=dict(projection_type='natural earth', showland=True),
|
1121 |
-
height=600
|
1122 |
-
)
|
1123 |
-
|
1124 |
-
# Create cluster stats plot
|
1125 |
-
x_axis = np.linspace(0, 1, max_length)
|
1126 |
-
fig_stats = make_subplots(rows=2, cols=1, shared_xaxes=True,
|
1127 |
-
subplot_titles=("Mean Wind Speed (knots)", "Mean MSLP (hPa)"))
|
1128 |
-
|
1129 |
-
for i, (label, wind_curve, pres_curve) in enumerate(cluster_stats):
|
1130 |
-
fig_stats.add_trace(go.Scatter(
|
1131 |
-
x=x_axis,
|
1132 |
-
y=wind_curve,
|
1133 |
-
mode='lines',
|
1134 |
-
line=dict(width=2, color=colors[i % len(colors)]),
|
1135 |
-
name=f"Cluster {label} Mean Wind",
|
1136 |
-
showlegend=True
|
1137 |
-
), row=1, col=1)
|
1138 |
-
|
1139 |
-
fig_stats.add_trace(go.Scatter(
|
1140 |
-
x=x_axis,
|
1141 |
-
y=pres_curve,
|
1142 |
-
mode='lines',
|
1143 |
-
line=dict(width=2, color=colors[i % len(colors)]),
|
1144 |
-
name=f"Cluster {label} Mean MSLP",
|
1145 |
-
showlegend=False
|
1146 |
-
), row=2, col=1)
|
1147 |
-
|
1148 |
-
fig_stats.update_layout(
|
1149 |
-
title="Cluster Mean Curves",
|
1150 |
-
xaxis_title="Normalized Route Index",
|
1151 |
-
yaxis_title="Mean Wind Speed (knots)",
|
1152 |
-
xaxis2_title="Normalized Route Index",
|
1153 |
-
yaxis2_title="Mean MSLP (hPa)",
|
1154 |
-
height=600
|
1155 |
-
)
|
1156 |
-
|
1157 |
-
info = f"TSNE clustering complete. Selected eps: {selected_eps:.2f}. Clusters: {len(unique_labels)}. Total storms: {len(route_vectors)}."
|
1158 |
-
return fig_tsne, fig_routes, fig_stats, info
|
1159 |
-
|
1160 |
-
except Exception as e:
|
1161 |
-
logging.error(f"Error in TSNE clustering: {e}")
|
1162 |
-
return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), f"Error in TSNE clustering: {e}"
|
1163 |
-
|
1164 |
# -----------------------------
|
1165 |
# Animation Functions
|
1166 |
# -----------------------------
|
@@ -1259,7 +964,7 @@ def simplified_track_video(year, basin, typhoon, standard):
|
|
1259 |
return generate_track_video_from_csv(year, storm_id, standard)
|
1260 |
|
1261 |
# -----------------------------
|
1262 |
-
#
|
1263 |
# -----------------------------
|
1264 |
|
1265 |
def update_typhoon_options_fixed(year, basin):
|
@@ -1313,149 +1018,190 @@ def update_typhoon_options_fixed(year, basin):
|
|
1313 |
return gr.update(choices=[], value=None)
|
1314 |
|
1315 |
# -----------------------------
|
1316 |
-
# Load & Process Data
|
1317 |
# -----------------------------
|
1318 |
|
1319 |
-
|
1320 |
-
|
1321 |
-
|
1322 |
-
|
1323 |
-
|
1324 |
-
|
1325 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1326 |
|
1327 |
# -----------------------------
|
1328 |
-
#
|
1329 |
# -----------------------------
|
1330 |
|
1331 |
-
|
1332 |
-
|
1333 |
-
|
1334 |
-
|
1335 |
-
|
1336 |
-
|
1337 |
-
|
|
|
|
|
|
|
1338 |
|
1339 |
-
|
1340 |
|
1341 |
-
|
1342 |
-
|
1343 |
-
|
1344 |
-
|
1345 |
-
|
1346 |
-
|
1347 |
-
|
1348 |
-
|
1349 |
-
|
1350 |
-
|
1351 |
-
|
1352 |
-
|
1353 |
-
""" % (len(oni_data), len(typhoon_data), len(merged_data)))
|
1354 |
|
1355 |
-
|
1356 |
-
|
1357 |
-
|
1358 |
-
|
1359 |
-
|
1360 |
-
|
1361 |
-
|
1362 |
-
|
1363 |
-
|
1364 |
-
|
1365 |
-
|
1366 |
-
|
1367 |
-
|
1368 |
-
|
1369 |
-
|
1370 |
-
|
1371 |
-
|
1372 |
-
|
1373 |
-
|
1374 |
-
|
1375 |
-
|
1376 |
-
|
1377 |
-
|
1378 |
-
|
1379 |
-
|
1380 |
-
|
1381 |
-
|
1382 |
-
|
1383 |
-
|
1384 |
-
|
1385 |
-
|
1386 |
-
|
1387 |
-
|
1388 |
-
|
1389 |
-
|
1390 |
-
|
1391 |
-
|
1392 |
-
|
1393 |
-
|
1394 |
-
|
1395 |
-
|
1396 |
-
|
1397 |
-
|
1398 |
-
|
1399 |
-
|
1400 |
-
|
1401 |
-
|
1402 |
-
|
1403 |
-
|
1404 |
-
|
1405 |
-
|
1406 |
-
|
1407 |
-
|
1408 |
-
|
1409 |
-
|
1410 |
-
|
1411 |
-
|
1412 |
-
|
1413 |
-
|
1414 |
-
|
1415 |
-
|
1416 |
-
|
1417 |
-
|
1418 |
-
|
1419 |
-
|
1420 |
-
|
1421 |
-
|
1422 |
-
|
1423 |
-
|
1424 |
-
|
1425 |
-
|
1426 |
-
|
1427 |
-
|
1428 |
-
|
1429 |
-
|
1430 |
-
|
1431 |
-
|
1432 |
-
|
1433 |
-
|
1434 |
-
|
1435 |
-
|
1436 |
-
|
1437 |
-
|
1438 |
-
|
1439 |
-
|
1440 |
-
|
1441 |
-
|
1442 |
-
|
1443 |
-
|
1444 |
-
|
1445 |
-
|
1446 |
-
|
1447 |
-
|
1448 |
-
|
1449 |
-
|
1450 |
-
|
1451 |
-
|
1452 |
-
|
1453 |
-
|
1454 |
-
|
1455 |
-
|
1456 |
-
|
1457 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
1458 |
|
1459 |
-
# Fixed launch command
|
1460 |
if __name__ == "__main__":
|
1461 |
demo.launch()
|
|
|
45 |
format='%(asctime)s - %(levelname)s - %(message)s'
|
46 |
)
|
47 |
|
48 |
+
# Remove argument parser to simplify startup
|
49 |
+
DATA_PATH = '/tmp/typhoon_data' if 'SPACE_ID' in os.environ else tempfile.gettempdir()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
50 |
|
51 |
# Ensure directory exists and is writable
|
52 |
try:
|
|
|
106 |
'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
|
107 |
}
|
108 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
109 |
# -----------------------------
|
110 |
# Utility Functions for HF Spaces
|
111 |
# -----------------------------
|
|
|
140 |
except Exception as e:
|
141 |
logging.error(f"Error saving file {file_path}: {e}")
|
142 |
# Clean up temp file if it exists
|
143 |
+
temp_path = file_path + '.tmp'
|
144 |
if os.path.exists(temp_path):
|
145 |
try:
|
146 |
os.remove(temp_path)
|
|
|
352 |
logging.error(f"Missing critical columns. Available: {list(df.columns)}")
|
353 |
return None
|
354 |
|
355 |
+
# Clean and standardize the data with format specification
|
356 |
if 'ISO_TIME' in df.columns:
|
357 |
+
df['ISO_TIME'] = pd.to_datetime(df['ISO_TIME'], format='%Y-%m-%d %H:%M:%S', errors='coerce')
|
358 |
|
359 |
# Clean numeric columns
|
360 |
numeric_columns = ['LAT', 'LON', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES']
|
|
|
693 |
# Visualization Functions
|
694 |
# -----------------------------
|
695 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
696 |
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
697 |
"""Get full typhoon tracks"""
|
698 |
start_date = datetime(start_year, start_month, 1)
|
|
|
754 |
|
755 |
def get_wind_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
756 |
"""Get wind analysis"""
|
757 |
+
start_date = datetime(start_year, start_month, 1)
|
758 |
+
end_date = datetime(end_year, end_month, 28)
|
759 |
+
filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
|
760 |
+
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
761 |
+
if enso_phase != 'all':
|
762 |
+
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
763 |
+
|
764 |
+
fig = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
|
765 |
+
hover_data=['NAME','Year','Category'],
|
766 |
+
title='Wind Speed vs ONI',
|
767 |
+
labels={'ONI':'ONI Value','USA_WIND':'Max Wind Speed (knots)'},
|
768 |
+
color_discrete_map=color_map)
|
769 |
+
|
770 |
+
if typhoon_search:
|
771 |
+
mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
|
772 |
+
if mask.any():
|
773 |
+
fig.add_trace(go.Scatter(
|
774 |
+
x=filtered_data.loc[mask,'ONI'], y=filtered_data.loc[mask,'USA_WIND'],
|
775 |
+
mode='markers', marker=dict(size=10, color='red', symbol='star'),
|
776 |
+
name=f'Matched: {typhoon_search}',
|
777 |
+
text=filtered_data.loc[mask,'NAME']+' ('+filtered_data.loc[mask,'Year'].astype(str)+')'
|
778 |
+
))
|
779 |
+
|
780 |
regression = perform_wind_regression(start_year, start_month, end_year, end_month)
|
781 |
+
return fig, regression
|
782 |
|
783 |
def get_pressure_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
784 |
"""Get pressure analysis"""
|
785 |
+
start_date = datetime(start_year, start_month, 1)
|
786 |
+
end_date = datetime(end_year, end_month, 28)
|
787 |
+
filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
|
788 |
+
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
789 |
+
if enso_phase != 'all':
|
790 |
+
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
791 |
+
|
792 |
+
fig = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
|
793 |
+
hover_data=['NAME','Year','Category'],
|
794 |
+
title='Pressure vs ONI',
|
795 |
+
labels={'ONI':'ONI Value','USA_PRES':'Min Pressure (hPa)'},
|
796 |
+
color_discrete_map=color_map)
|
797 |
+
|
798 |
+
if typhoon_search:
|
799 |
+
mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
|
800 |
+
if mask.any():
|
801 |
+
fig.add_trace(go.Scatter(
|
802 |
+
x=filtered_data.loc[mask,'ONI'], y=filtered_data.loc[mask,'USA_PRES'],
|
803 |
+
mode='markers', marker=dict(size=10, color='red', symbol='star'),
|
804 |
+
name=f'Matched: {typhoon_search}',
|
805 |
+
text=filtered_data.loc[mask,'NAME']+' ('+filtered_data.loc[mask,'Year'].astype(str)+')'
|
806 |
+
))
|
807 |
+
|
808 |
regression = perform_pressure_regression(start_year, start_month, end_year, end_month)
|
809 |
+
return fig, regression
|
810 |
|
811 |
def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
812 |
"""Get longitude analysis"""
|
813 |
+
start_date = datetime(start_year, start_month, 1)
|
814 |
+
end_date = datetime(end_year, end_month, 28)
|
815 |
+
filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
|
816 |
+
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
817 |
+
if enso_phase != 'all':
|
818 |
+
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
819 |
+
|
820 |
+
fig = px.scatter(filtered_data, x='LON', y='ONI', hover_data=['NAME'],
|
821 |
+
title='Typhoon Generation Longitude vs ONI (All Years)')
|
822 |
+
|
823 |
+
if len(filtered_data) > 1:
|
824 |
+
X = np.array(filtered_data['LON']).reshape(-1,1)
|
825 |
+
y = filtered_data['ONI']
|
826 |
+
try:
|
827 |
+
model = sm.OLS(y, sm.add_constant(X)).fit()
|
828 |
+
y_pred = model.predict(sm.add_constant(X))
|
829 |
+
fig.add_trace(go.Scatter(x=filtered_data['LON'], y=y_pred, mode='lines', name='Regression Line'))
|
830 |
+
slope = model.params[1]
|
831 |
+
slopes_text = f"All Years Slope: {slope:.4f}"
|
832 |
+
except Exception as e:
|
833 |
+
slopes_text = f"Regression Error: {e}"
|
834 |
+
else:
|
835 |
+
slopes_text = "Insufficient data for regression"
|
836 |
+
|
837 |
regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
|
838 |
+
return fig, slopes_text, regression
|
839 |
|
840 |
def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
|
841 |
"""Categorize typhoon by standard"""
|
|
|
866 |
return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
|
867 |
return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
|
868 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
869 |
# -----------------------------
|
870 |
# Animation Functions
|
871 |
# -----------------------------
|
|
|
964 |
return generate_track_video_from_csv(year, storm_id, standard)
|
965 |
|
966 |
# -----------------------------
|
967 |
+
# Update Typhoon Options Function
|
968 |
# -----------------------------
|
969 |
|
970 |
def update_typhoon_options_fixed(year, basin):
|
|
|
1018 |
return gr.update(choices=[], value=None)
|
1019 |
|
1020 |
# -----------------------------
# Load & Process Data
# -----------------------------

# Module-level data tables, populated by initialize_data() below.
oni_data = None       # raw ONI table: one row per year, one column per month
typhoon_data = None   # raw typhoon track records
merged_data = None    # per-typhoon maxima joined with ONI values


def _create_fallback_dataset():
    """Build a minimal in-memory dataset so the app can still start.

    Returns a tuple ``(oni, typhoons, merged)`` built from a single
    neutral-ONI year (2000) plus synthetic typhoon records from
    create_fallback_typhoon_data().
    """
    oni = pd.DataFrame({
        'Year': [2000], 'Jan': [0], 'Feb': [0], 'Mar': [0], 'Apr': [0],
        'May': [0], 'Jun': [0], 'Jul': [0], 'Aug': [0], 'Sep': [0],
        'Oct': [0], 'Nov': [0], 'Dec': [0]
    })
    typhoons = create_fallback_typhoon_data()
    merged = merge_data(process_oni_data(oni), process_typhoon_data(typhoons))
    return oni, typhoons, merged


def initialize_data():
    """Load ONI and typhoon data, falling back to synthetic data on failure.

    Populates the module-level globals ``oni_data``, ``typhoon_data`` and
    ``merged_data``. Never raises: any load/processing error is logged and
    replaced by the minimal fallback dataset so the UI can always render.
    """
    global oni_data, typhoon_data, merged_data
    try:
        logging.info("Starting data loading process...")
        update_oni_data()
        oni_data, typhoon_data = load_data_fixed(ONI_DATA_PATH, TYPHOON_DATA_PATH)

        if oni_data is not None and typhoon_data is not None:
            oni_long = process_oni_data(oni_data)
            typhoon_max = process_typhoon_data(typhoon_data)
            merged_data = merge_data(oni_long, typhoon_max)
            logging.info("Data loading complete.")
        else:
            # One (or both) sources failed to load — keep the app usable.
            logging.error("Failed to load required data")
            oni_data, typhoon_data, merged_data = _create_fallback_dataset()
    except Exception as e:
        logging.error(f"Error during data initialization: {e}")
        oni_data, typhoon_data, merged_data = _create_fallback_dataset()


# Initialize data at import time so the interface has tables to render.
initialize_data()
|
1065 |
|
1066 |
# -----------------------------
# Simplified Gradio Interface
# -----------------------------

def create_interface():
    """Build the Gradio UI for the typhoon dashboard.

    Returns a gr.Blocks app with one tab per analysis (overview, tracks,
    wind, pressure, longitude, path animation). If anything goes wrong
    while assembling the full interface, a minimal fallback UI is
    returned instead so the Space still starts.
    """
    try:
        with gr.Blocks() as demo:
            gr.Markdown("# Typhoon Analysis Dashboard")

            with gr.Tab("Overview"):
                # f-string so the tab reports how much data actually loaded
                gr.Markdown(f"""
                ## Welcome to the Typhoon Analysis Dashboard

                This dashboard allows you to analyze typhoon data in relation to ENSO phases.

                ### Features:
                - **Track Visualization**: View typhoon tracks by time period and ENSO phase.
                - **Wind Analysis**: Examine wind speed vs ONI relationships.
                - **Pressure Analysis**: Analyze pressure vs ONI relationships.
                - **Longitude Analysis**: Study typhoon generation longitude vs ONI.
                - **Path Animation**: View animated storm tracks on a world map.

                ### Data Status:
                - **ONI Data**: {len(oni_data)} years loaded
                - **Typhoon Data**: {len(typhoon_data)} records loaded
                - **Merged Data**: {len(merged_data)} typhoons with ONI values
                """)

            with gr.Tab("Track Visualization"):
                with gr.Row():
                    start_year = gr.Number(label="Start Year", value=2000)
                    start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    end_year = gr.Number(label="End Year", value=2024)
                    end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    typhoon_search = gr.Textbox(label="Typhoon Search")
                analyze_btn = gr.Button("Generate Tracks")
                tracks_plot = gr.Plot()
                typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
                analyze_btn.click(
                    fn=get_full_tracks,
                    inputs=[start_year, start_month, end_year, end_month, enso_phase, typhoon_search],
                    outputs=[tracks_plot, typhoon_count]
                )

            with gr.Tab("Wind Analysis"):
                with gr.Row():
                    wind_start_year = gr.Number(label="Start Year", value=2000)
                    wind_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    wind_end_year = gr.Number(label="End Year", value=2024)
                    wind_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    wind_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    wind_typhoon_search = gr.Textbox(label="Typhoon Search")
                wind_analyze_btn = gr.Button("Generate Wind Analysis")
                wind_scatter = gr.Plot()
                wind_regression_results = gr.Textbox(label="Wind Regression Results")
                wind_analyze_btn.click(
                    fn=get_wind_analysis,
                    inputs=[wind_start_year, wind_start_month, wind_end_year, wind_end_month, wind_enso_phase, wind_typhoon_search],
                    outputs=[wind_scatter, wind_regression_results]
                )

            with gr.Tab("Pressure Analysis"):
                with gr.Row():
                    pressure_start_year = gr.Number(label="Start Year", value=2000)
                    pressure_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    pressure_end_year = gr.Number(label="End Year", value=2024)
                    pressure_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    pressure_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    pressure_typhoon_search = gr.Textbox(label="Typhoon Search")
                pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
                pressure_scatter = gr.Plot()
                pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
                pressure_analyze_btn.click(
                    fn=get_pressure_analysis,
                    inputs=[pressure_start_year, pressure_start_month, pressure_end_year, pressure_end_month, pressure_enso_phase, pressure_typhoon_search],
                    outputs=[pressure_scatter, pressure_regression_results]
                )

            with gr.Tab("Longitude Analysis"):
                with gr.Row():
                    lon_start_year = gr.Number(label="Start Year", value=2000)
                    lon_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
                    # FIX: default was 2000 (degenerate 2000-2000 window);
                    # aligned with every other tab's default end year.
                    lon_end_year = gr.Number(label="End Year", value=2024)
                    lon_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
                    lon_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
                    lon_typhoon_search = gr.Textbox(label="Typhoon Search (Optional)")
                lon_analyze_btn = gr.Button("Generate Longitude Analysis")
                regression_plot = gr.Plot()
                slopes_text = gr.Textbox(label="Regression Slopes")
                lon_regression_results = gr.Textbox(label="Longitude Regression Results")
                lon_analyze_btn.click(
                    fn=get_longitude_analysis,
                    inputs=[lon_start_year, lon_start_month, lon_end_year, lon_end_month, lon_enso_phase, lon_typhoon_search],
                    outputs=[regression_plot, slopes_text, lon_regression_results]
                )

            with gr.Tab("Tropical Cyclone Path Animation"):
                with gr.Row():
                    year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950, 2025)], value="2000")
                    # Hidden constant: basin filtering is not exposed in this UI.
                    basin_constant = gr.Textbox(value="All Basins", visible=False)
                with gr.Row():
                    typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
                    standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic', 'taiwan'], value='atlantic')
                animate_btn = gr.Button("Generate Animation")
                path_video = gr.Video()
                animation_info = gr.Markdown("""
                ### Animation Instructions
                1. Select a year.
                2. Choose a tropical cyclone from the populated list.
                3. Select a classification standard (Atlantic or Taiwan).
                4. Click "Generate Animation".
                5. The animation displays the storm track on a world map with dynamic sidebar information.
                """)
                # Repopulate the storm list whenever the selected year changes.
                year_dropdown.change(
                    fn=update_typhoon_options_fixed,
                    inputs=[year_dropdown, basin_constant],
                    outputs=typhoon_dropdown
                )
                animate_btn.click(
                    fn=simplified_track_video,
                    inputs=[year_dropdown, basin_constant, typhoon_dropdown, standard_dropdown],
                    outputs=path_video
                )

        return demo
    except Exception as e:
        logging.error(f"Error creating Gradio interface: {e}")
        # Minimal fallback interface so the Space still renders something.
        with gr.Blocks() as demo:
            gr.Markdown("# Typhoon Analysis Dashboard")
            gr.Markdown("**Error**: Could not load full interface. Please check logs.")
        return demo


# Create and launch the interface
demo = create_interface()
|
1205 |
|
|
|
1206 |
# Launch the Gradio app when executed as a script (HF Spaces entry point).
if __name__ == "__main__":
    demo.launch()
|