Update app.py

app.py CHANGED

@@ -1038,7 +1038,7 @@ def cluster_storms(embedding, method='dbscan', eps=0.5, min_samples=3):
        return np.array([0] * len(embedding))

def create_separate_clustering_plots(storm_features, typhoon_data, method='umap'):
    """Create separate plots for clustering analysis - ENHANCED CLARITY VERSION"""
    try:
        # Validate inputs
        if storm_features is None or storm_features.empty:

@@ -1073,34 +1073,102 @@ def create_separate_clustering_plots(storm_features, typhoon_data, method='umap'
        storm_features_viz['NAME'] = 'UNNAMED'
        storm_features_viz['SEASON'] = 2000

        # Get unique clusters and assign distinct colors
        unique_clusters = sorted([c for c in storm_features_viz['cluster'].unique() if c != -1])
        noise_count = len(storm_features_viz[storm_features_viz['cluster'] == -1])

        # 1. Enhanced clustering scatter plot with clear cluster identification
        fig_cluster = go.Figure()

        # Add noise points first
        if noise_count > 0:
            noise_data = storm_features_viz[storm_features_viz['cluster'] == -1]
            fig_cluster.add_trace(
                go.Scatter(
                    x=noise_data['dim1'],
                    y=noise_data['dim2'],
                    mode='markers',
                    marker=dict(color='lightgray', size=8, opacity=0.5, symbol='x'),
                    name=f'Noise ({noise_count} storms)',
                    hovertemplate=(
                        '<b>%{customdata[0]}</b><br>'
                        'Season: %{customdata[1]}<br>'
                        'Cluster: Noise<br>'
                        f'{method.upper()} Dim 1: %{{x:.2f}}<br>'
                        f'{method.upper()} Dim 2: %{{y:.2f}}<br>'
                        '<extra></extra>'
                    ),
                    customdata=np.column_stack((
                        noise_data['NAME'].fillna('UNNAMED'),
                        noise_data['SEASON'].fillna(2000)
                    ))
                )
            )

        # Add clusters with distinct colors and shapes
        cluster_symbols = ['circle', 'square', 'diamond', 'triangle-up', 'triangle-down',
                           'pentagon', 'hexagon', 'star', 'cross', 'circle-open']

        for i, cluster in enumerate(unique_clusters):
            cluster_data = storm_features_viz[storm_features_viz['cluster'] == cluster]
            color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)]
            symbol = cluster_symbols[i % len(cluster_symbols)]

            fig_cluster.add_trace(
                go.Scatter(
                    x=cluster_data['dim1'],
                    y=cluster_data['dim2'],
                    mode='markers',
                    marker=dict(color=color, size=10, symbol=symbol, line=dict(width=1, color='white')),
                    name=f'Cluster {cluster} ({len(cluster_data)} storms)',
                    hovertemplate=(
                        '<b>%{customdata[0]}</b><br>'
                        'Season: %{customdata[1]}<br>'
                        f'Cluster: {cluster}<br>'
                        f'{method.upper()} Dim 1: %{{x:.2f}}<br>'
                        f'{method.upper()} Dim 2: %{{y:.2f}}<br>'
                        'Intensity: %{customdata[2]:.0f} kt<br>'
                        '<extra></extra>'
                    ),
                    customdata=np.column_stack((
                        cluster_data['NAME'].fillna('UNNAMED'),
                        cluster_data['SEASON'].fillna(2000),
                        cluster_data['USA_WIND_max'].fillna(0)
                    ))
                )
            )

        fig_cluster.update_layout(
            title=f'Storm Clustering Analysis using {method.upper()}<br><sub>Each symbol/color represents a distinct storm pattern group</sub>',
            xaxis_title=f'{method.upper()} Dimension 1',
            yaxis_title=f'{method.upper()} Dimension 2',
            height=600,
            showlegend=True
        )

        # 2. ENHANCED route map with cluster legends and clearer representation
        fig_routes = go.Figure()

        # Create a comprehensive legend showing cluster characteristics
        cluster_info_text = []

        for i, cluster in enumerate(unique_clusters):
            cluster_storm_ids = storm_features_viz[storm_features_viz['cluster'] == cluster]['SID'].tolist()
            color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)]

            # Get cluster statistics for legend
            cluster_data = storm_features_viz[storm_features_viz['cluster'] == cluster]
            avg_intensity = cluster_data['USA_WIND_max'].mean() if 'USA_WIND_max' in cluster_data.columns else 0
            avg_pressure = cluster_data['USA_PRES_min'].mean() if 'USA_PRES_min' in cluster_data.columns else 1000

            cluster_info_text.append(
                f"Cluster {cluster}: {len(cluster_storm_ids)} storms, "
                f"Avg: {avg_intensity:.0f}kt/{avg_pressure:.0f}hPa"
            )

            # Add multiple storms per cluster with clear identification
            storms_added = 0
            for j, sid in enumerate(cluster_storm_ids[:8]):  # Show up to 8 storms per cluster
                try:
                    storm_track = typhoon_data[typhoon_data['SID'] == sid].sort_values('ISO_TIME')
                    if len(storm_track) > 1:

@@ -1110,33 +1178,73 @@ def create_separate_clustering_plots(storm_features, typhoon_data, method='umap'

                    if len(storm_track) > 1:
                        storm_name = storm_track['NAME'].iloc[0] if pd.notna(storm_track['NAME'].iloc[0]) else 'UNNAMED'
                        storm_season = storm_track['SEASON'].iloc[0] if 'SEASON' in storm_track.columns else 'Unknown'

                        # Vary line style for different storms in same cluster
                        line_styles = ['solid', 'dash', 'dot', 'dashdot']
                        line_style = line_styles[j % len(line_styles)]
                        line_width = 3 if j == 0 else 2  # First storm thicker

                        fig_routes.add_trace(
                            go.Scattergeo(
                                lon=storm_track['LON'],
                                lat=storm_track['LAT'],
                                mode='lines+markers',
                                line=dict(color=color, width=line_width, dash=line_style),
                                marker=dict(color=color, size=3),
                                name=f'C{cluster}: {storm_name} ({storm_season})',
                                showlegend=True,
                                legendgroup=f'cluster_{cluster}',
                                hovertemplate=(
                                    f'<b>Cluster {cluster}: {storm_name}</b><br>'
                                    'Lat: %{lat:.1f}°<br>'
                                    'Lon: %{lon:.1f}°<br>'
                                    f'Season: {storm_season}<br>'
                                    f'Pattern Group: {cluster}<br>'
                                    '<extra></extra>'
                                )
                            )
                        )
                        storms_added += 1
                except Exception as track_error:
                    logging.warning(f"Error adding track for storm {sid}: {track_error}")
                    continue

            # Add cluster centroid marker
            if len(cluster_storm_ids) > 0:
                # Calculate average genesis location for cluster
                cluster_storms = storm_features_viz[storm_features_viz['cluster'] == cluster]
                if 'genesis_lat' in cluster_storms.columns and 'genesis_lon' in cluster_storms.columns:
                    avg_lat = cluster_storms['genesis_lat'].mean()
                    avg_lon = cluster_storms['genesis_lon'].mean()

                    fig_routes.add_trace(
                        go.Scattergeo(
                            lon=[avg_lon],
                            lat=[avg_lat],
                            mode='markers',
                            marker=dict(
                                color=color,
                                size=20,
                                symbol='star',
                                line=dict(width=2, color='white')
                            ),
                            name=f'C{cluster} Center',
                            showlegend=True,
                            legendgroup=f'cluster_{cluster}',
                            hovertemplate=(
                                f'<b>Cluster {cluster} Genesis Center</b><br>'
                                f'Avg Position: {avg_lat:.1f}°N, {avg_lon:.1f}°E<br>'
                                f'Storms: {len(cluster_storm_ids)}<br>'
                                f'Avg Intensity: {avg_intensity:.0f} kt<br>'
                                '<extra></extra>'
                            )
                        )
                    )

        # Update route map layout with enhanced information
        fig_routes.update_layout(
            title=f"Storm Routes by {method.upper()} Clusters<br><sub>Different line styles = different storms in same cluster | Stars = cluster centers</sub>",
            geo=dict(
                projection_type="natural earth",
                showland=True,

@@ -1146,20 +1254,33 @@ def create_separate_clustering_plots(storm_features, typhoon_data, method='umap'
                showcoastlines=True,
                coastlinecolor="Gray",
                center=dict(lat=20, lon=140)
            ),
            height=600,
            showlegend=True
        )

        # Add cluster info annotation
        cluster_summary = "<br>".join(cluster_info_text)
        fig_routes.add_annotation(
            text=f"<b>Cluster Summary:</b><br>{cluster_summary}",
            xref="paper", yref="paper",
            x=0.02, y=0.98,
            showarrow=False,
            align="left",
            bgcolor="rgba(255,255,255,0.8)",
            bordercolor="gray",
            borderwidth=1
        )

        # 3. Enhanced pressure evolution plot with cluster identification
        fig_pressure = go.Figure()

        for i, cluster in enumerate(unique_clusters):
            cluster_storm_ids = storm_features_viz[storm_features_viz['cluster'] == cluster]['SID'].tolist()
            color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)]

            cluster_pressures = []
            for j, sid in enumerate(cluster_storm_ids[:5]):  # Limit to 5 storms per cluster
                try:
                    storm_track = typhoon_data[typhoon_data['SID'] == sid].sort_values('ISO_TIME')
                    if len(storm_track) > 1 and 'USA_PRES' in storm_track.columns:

@@ -1168,147 +1289,182 @@ def create_separate_clustering_plots(storm_features, typhoon_data, method='umap'
                            storm_name = storm_track['NAME'].iloc[0] if pd.notna(storm_track['NAME'].iloc[0]) else 'UNNAMED'
                            time_hours = range(len(pressure_values))

                            # Normalize time to show relative progression
                            normalized_time = np.linspace(0, 100, len(pressure_values))

                            fig_pressure.add_trace(
                                go.Scatter(
                                    x=normalized_time,
                                    y=pressure_values,
                                    mode='lines',
                                    line=dict(color=color, width=2, dash='solid' if j == 0 else 'dash'),
                                    name=f'C{cluster}: {storm_name}' if j == 0 else None,
                                    showlegend=(j == 0),
                                    legendgroup=f'pressure_cluster_{cluster}',
                                    hovertemplate=(
                                        f'<b>Cluster {cluster}: {storm_name}</b><br>'
                                        'Progress: %{x:.0f}%<br>'
                                        'Pressure: %{y:.0f} hPa<br>'
                                        '<extra></extra>'
                                    ),
                                    opacity=0.8 if j == 0 else 0.5
                                )
                            )
                            cluster_pressures.extend(pressure_values)
                except Exception as e:
                    continue

            # Add cluster average line
            if cluster_pressures:
                avg_pressure = np.mean(cluster_pressures)
                fig_pressure.add_hline(
                    y=avg_pressure,
                    line_dash="dot",
                    line_color=color,
                    annotation_text=f"C{cluster} Avg: {avg_pressure:.0f}",
                    annotation_position="right"
                )

        fig_pressure.update_layout(
            title=f"Pressure Evolution by {method.upper()} Clusters<br><sub>Normalized timeline (0-100%) | Dotted lines = cluster averages</sub>",
            xaxis_title="Storm Progress (%)",
            yaxis_title="Pressure (hPa)",
            height=500
        )

        # 4. Enhanced wind evolution plot
        fig_wind = go.Figure()

        for i, cluster in enumerate(unique_clusters):
            cluster_storm_ids = storm_features_viz[storm_features_viz['cluster'] == cluster]['SID'].tolist()
            color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)]

            cluster_winds = []
            for j, sid in enumerate(cluster_storm_ids[:5]):  # Limit to 5 storms per cluster
                try:
                    storm_track = typhoon_data[typhoon_data['SID'] == sid].sort_values('ISO_TIME')
                    if len(storm_track) > 1 and 'USA_WIND' in storm_track.columns:
                        wind_values = pd.to_numeric(storm_track['USA_WIND'], errors='coerce').dropna()
                        if len(wind_values) > 0:
                            storm_name = storm_track['NAME'].iloc[0] if pd.notna(storm_track['NAME'].iloc[0]) else 'UNNAMED'

                            # Normalize time to show relative progression
                            normalized_time = np.linspace(0, 100, len(wind_values))

                            fig_wind.add_trace(
                                go.Scatter(
                                    x=normalized_time,
                                    y=wind_values,
                                    mode='lines',
                                    line=dict(color=color, width=2, dash='solid' if j == 0 else 'dash'),
                                    name=f'C{cluster}: {storm_name}' if j == 0 else None,
                                    showlegend=(j == 0),
                                    legendgroup=f'wind_cluster_{cluster}',
                                    hovertemplate=(
                                        f'<b>Cluster {cluster}: {storm_name}</b><br>'
                                        'Progress: %{x:.0f}%<br>'
                                        'Wind: %{y:.0f} kt<br>'
                                        '<extra></extra>'
                                    ),
                                    opacity=0.8 if j == 0 else 0.5
                                )
                            )
                            cluster_winds.extend(wind_values)
                except Exception as e:
                    continue

            # Add cluster average line
            if cluster_winds:
                avg_wind = np.mean(cluster_winds)
                fig_wind.add_hline(
                    y=avg_wind,
                    line_dash="dot",
                    line_color=color,
                    annotation_text=f"C{cluster} Avg: {avg_wind:.0f}",
                    annotation_position="right"
                )

        fig_wind.update_layout(
            title=f"Wind Speed Evolution by {method.upper()} Clusters<br><sub>Normalized timeline (0-100%) | Dotted lines = cluster averages</sub>",
            xaxis_title="Storm Progress (%)",
            yaxis_title="Wind Speed (kt)",
            height=500
        )

        # Generate enhanced cluster statistics with clear explanations
        try:
            stats_text = f"ENHANCED {method.upper()} CLUSTER ANALYSIS RESULTS\n" + "="*60 + "\n\n"
            stats_text += f"🔍 DIMENSIONALITY REDUCTION: {method.upper()}\n"
            stats_text += f"🎯 CLUSTERING ALGORITHM: DBSCAN (automatic pattern discovery)\n"
            stats_text += f"📊 TOTAL STORMS ANALYZED: {len(storm_features_viz)}\n"
            stats_text += f"🎨 CLUSTERS DISCOVERED: {len(unique_clusters)}\n"
            if noise_count > 0:
                stats_text += f"❌ NOISE POINTS: {noise_count} storms (don't fit clear patterns)\n"
            stats_text += "\n"

            for cluster in sorted(storm_features_viz['cluster'].unique()):
                cluster_data = storm_features_viz[storm_features_viz['cluster'] == cluster]
                storm_count = len(cluster_data)

                if cluster == -1:
                    stats_text += f"❌ NOISE GROUP: {storm_count} storms\n"
                    stats_text += " → These storms don't follow the main patterns\n"
                    stats_text += " → May represent unique or rare storm behaviors\n\n"
                    continue

                stats_text += f"🎯 CLUSTER {cluster}: {storm_count} storms\n"
                stats_text += f" 🎨 Color: {CLUSTER_COLORS[cluster % len(CLUSTER_COLORS)]}\n"

                # Add detailed statistics if available
                if 'USA_WIND_max' in cluster_data.columns:
                    wind_mean = cluster_data['USA_WIND_max'].mean()
                    wind_std = cluster_data['USA_WIND_max'].std()
                    stats_text += f" 💨 Intensity: {wind_mean:.1f} ± {wind_std:.1f} kt\n"

                if 'USA_PRES_min' in cluster_data.columns:
                    pres_mean = cluster_data['USA_PRES_min'].mean()
                    pres_std = cluster_data['USA_PRES_min'].std()
                    stats_text += f" 🌡️ Pressure: {pres_mean:.1f} ± {pres_std:.1f} hPa\n"

                if 'track_length' in cluster_data.columns:
                    track_mean = cluster_data['track_length'].mean()
                    stats_text += f" 📏 Avg Track Length: {track_mean:.1f} points\n"

                if 'genesis_lat' in cluster_data.columns and 'genesis_lon' in cluster_data.columns:
                    lat_mean = cluster_data['genesis_lat'].mean()
                    lon_mean = cluster_data['genesis_lon'].mean()
                    stats_text += f" 🎯 Genesis Region: {lat_mean:.1f}°N, {lon_mean:.1f}°E\n"

                # Add interpretation
                if wind_mean < 50:
                    stats_text += " 💡 Pattern: Weaker storm group\n"
                elif wind_mean > 100:
                    stats_text += " 💡 Pattern: Intense storm group\n"
                else:
                    stats_text += " 💡 Pattern: Moderate intensity group\n"

                stats_text += "\n"

            # Add explanation of the analysis
            stats_text += "📖 INTERPRETATION GUIDE:\n"
            stats_text += f"• {method.upper()} reduces storm characteristics to 2D for visualization\n"
            stats_text += "• DBSCAN finds natural groupings without preset number of clusters\n"
            stats_text += "• Each cluster represents storms with similar behavior patterns\n"
            stats_text += "• Route colors match cluster colors from the similarity plot\n"
            stats_text += "• Stars on map show average genesis locations for each cluster\n"
            stats_text += "• Temporal plots show how each cluster behaves over time\n\n"

            stats_text += f"🔧 FEATURES USED FOR CLUSTERING:\n"
            stats_text += f" Total: {len(feature_cols)} storm characteristics\n"
            stats_text += f" Including: intensity, pressure, track shape, genesis location\n"

        except Exception as stats_error:
            stats_text = f"Error generating enhanced statistics: {str(stats_error)}"

        return fig_cluster, fig_routes, fig_pressure, fig_wind, stats_text

    except Exception as e:
        logging.error(f"Error in enhanced clustering analysis: {e}")
        import traceback
        traceback.print_exc()
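For reference, a minimal usage sketch of the updated `create_separate_clustering_plots` follows. It is an illustration only: the module name `app`, the toy values, and the exact set of required input columns are assumptions. The diff does not show lines 1045-1072 of the function, so the sketch guesses the input schema from the columns referenced in the visible hunks (`SID`, `NAME`, `SEASON`, `cluster`, `dim1`, `dim2`, `USA_WIND_max`, `USA_PRES_min`, `genesis_lat`, `genesis_lon`, `track_length` for the per-storm table; `SID`, `ISO_TIME`, `LAT`, `LON`, `NAME`, `USA_WIND`, `USA_PRES` for the per-fix table).

    # Hypothetical usage sketch; the "app" module name and column set are
    # inferred from the hunks above, not guaranteed by the Space.
    import pandas as pd

    from app import create_separate_clustering_plots

    storm_features = pd.DataFrame({
        'SID': ['2000001N10140', '2000002N12145', '2000003N15150'],
        'NAME': ['ALPHA', 'BRAVO', 'CHARLIE'],
        'SEASON': [2000, 2000, 2001],
        'cluster': [0, 0, -1],        # -1 marks DBSCAN noise
        'dim1': [0.1, 0.3, 2.5],      # 2-D embedding coordinates (e.g. UMAP)
        'dim2': [-0.2, 0.1, 1.8],
        'USA_WIND_max': [85.0, 100.0, 45.0],
        'USA_PRES_min': [950.0, 935.0, 990.0],
        'genesis_lat': [10.0, 12.0, 15.0],
        'genesis_lon': [140.0, 145.0, 150.0],
        'track_length': [30, 42, 18],
    })

    typhoon_data = pd.DataFrame({
        'SID': ['2000001N10140'] * 3 + ['2000002N12145'] * 3,
        'ISO_TIME': list(pd.date_range('2000-08-01', periods=3, freq='6h')) * 2,
        'LAT': [10.0, 11.0, 12.0, 12.0, 13.5, 15.0],
        'LON': [140.0, 139.5, 139.0, 145.0, 144.0, 143.0],
        'NAME': ['ALPHA'] * 3 + ['BRAVO'] * 3,
        'USA_WIND': [35, 60, 85, 40, 70, 100],
        'USA_PRES': [1000, 980, 950, 998, 970, 935],
    })

    fig_cluster, fig_routes, fig_pressure, fig_wind, stats_text = \
        create_separate_clustering_plots(storm_features, typhoon_data, method='umap')
    print(stats_text)
    fig_cluster.show()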
|
|
|
@@ -1562,21 +1718,13 @@ def predict_storm_route_and_intensity(lat, lon, month, oni_value, models=None, f
    }

def create_route_visualization(prediction_results, show_uncertainty=True):
    """Create comprehensive route and intensity visualization - COMPLETELY FIXED"""
    try:
        if 'route_forecast' not in prediction_results or not prediction_results['route_forecast']:
            return None, "No route forecast data available"

        route_data = prediction_results['route_forecast']

        # Extract data for plotting
        hours = [point['hour'] for point in route_data]
        lats = [point['lat'] for point in route_data]

@@ -1584,18 +1732,31 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
        intensities = [point['intensity_kt'] for point in route_data]
        categories = [point['category'] for point in route_data]

        # Create separate figures to avoid geo/regular plot conflicts
        fig = make_subplots(
            rows=2, cols=2,
            subplot_titles=('Forecast Track', 'Intensity Evolution', 'Position Uncertainty', 'Category Timeline'),
            specs=[[{"type": "geo", "colspan": 2}, None],
                   [{"type": "xy"}, {"type": "xy"}]],
            vertical_spacing=0.1
        )

        # 1. Route visualization on geographic plot
        for i in range(len(route_data)):
            point = route_data[i]
            color = enhanced_color_map.get(point['category'], 'rgb(128,128,128)')

            # Convert rgb to regular color format
            color_hex = rgb_string_to_hex(color)

            if i == 0:  # Current position
                marker_size = 15
                opacity = 1.0
                symbol = 'star'
            else:
                marker_size = 8 + (point['intensity_kt'] / 20)  # Size based on intensity
                opacity = max(0.3, 1.0 - (i / len(route_data)) * 0.7)
                symbol = 'circle'

            fig.add_trace(
                go.Scattergeo(

@@ -1604,12 +1765,13 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
                    mode='markers',
                    marker=dict(
                        size=marker_size,
                        color=color_hex,
                        opacity=opacity,
                        symbol=symbol,
                        line=dict(width=1, color='white')
                    ),
                    name=f"Hour {point['hour']}" if i % 6 == 0 else None,
                    showlegend=(i % 6 == 0),
                    hovertemplate=(
                        f"<b>Hour {point['hour']}</b><br>"
                        f"Position: {point['lat']:.1f}°N, {point['lon']:.1f}°E<br>"

@@ -1627,7 +1789,7 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
                lon=lons,
                lat=lats,
                mode='lines',
                line=dict(color='black', width=3, dash='solid'),
                name='Forecast Track',
                showlegend=True
            ),

@@ -1642,15 +1804,12 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
            uncertainty_lons_lower = []

            for i, point in enumerate(route_data):
                uncertainty = 0.3 + (i / len(route_data)) * 1.5
                uncertainty_lats_upper.append(point['lat'] + uncertainty)
                uncertainty_lats_lower.append(point['lat'] - uncertainty)
                uncertainty_lons_upper.append(point['lon'] + uncertainty)
                uncertainty_lons_lower.append(point['lon'] - uncertainty)

            uncertainty_lats = uncertainty_lats_upper + uncertainty_lats_lower[::-1]
            uncertainty_lons = uncertainty_lons_upper + uncertainty_lons_lower[::-1]

@@ -1660,54 +1819,69 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
                    lat=uncertainty_lats,
                    mode='lines',
                    fill='toself',
                    fillcolor='rgba(128,128,128,0.15)',
                    line=dict(color='rgba(128,128,128,0.4)', width=1),
                    name='Uncertainty Cone',
                    showlegend=True
                ),
                row=1, col=1
            )

        # 2. Intensity evolution plot (regular subplot - no geo conflicts)
        fig.add_trace(
            go.Scatter(
                x=hours,
                y=intensities,
                mode='lines+markers',
                line=dict(color='red', width=3),
                marker=dict(size=6, color='red'),
                name='Intensity',
                showlegend=False
            ),
            row=2, col=1
        )

        # Add category threshold lines (NOT using add_hline to avoid geo conflicts)
        thresholds = [34, 64, 83, 96, 113, 137]
        threshold_names = ['TS', 'C1', 'C2', 'C3', 'C4', 'C5']

        for thresh, name in zip(thresholds, threshold_names):
            fig.add_trace(
                go.Scatter(
                    x=[min(hours), max(hours)],
                    y=[thresh, thresh],
                    mode='lines',
                    line=dict(color='gray', width=1, dash='dash'),
                    name=name,
                    showlegend=False,
                    hovertemplate=f"{name} Threshold: {thresh} kt<extra></extra>"
                ),
                row=2, col=1
            )

        # 3. Position uncertainty plot
        uncertainties = [0.3 + (i / len(route_data)) * 1.5 for i in range(len(route_data))]
        fig.add_trace(
            go.Scatter(
                x=hours,
                y=uncertainties,
                mode='lines+markers',
                line=dict(color='orange', width=2),
                marker=dict(size=4, color='orange'),
                name='Position Error',
                showlegend=False
            ),
            row=2, col=2
        )

        # Update layout
        fig.update_layout(
            title_text="Comprehensive Storm Forecast Analysis",
            showlegend=True,
            height=800
        )

        # Update geo layout (only for geo subplot)
        fig.update_geos(
            projection_type="natural earth",
            showland=True,

@@ -1716,14 +1890,16 @@ def create_route_visualization(prediction_results, show_uncertainty=True):
            oceancolor="LightBlue",
            showcoastlines=True,
            coastlinecolor="Gray",
            center=dict(lat=np.mean(lats), lon=np.mean(lons)),
            resolution=50,
            row=1, col=1
        )

        # Update regular subplot axes (NOT geo)
        fig.update_xaxes(title_text="Forecast Hour", row=2, col=1)
        fig.update_yaxes(title_text="Intensity (kt)", row=2, col=1)
        fig.update_xaxes(title_text="Forecast Hour", row=2, col=2)
        fig.update_yaxes(title_text="Position Error (°)", row=2, col=2)

        # Generate detailed forecast text
        current = prediction_results['current_prediction']

@@ -1738,14 +1914,14 @@ CURRENT CONDITIONS:

FORECAST TRACK (72-HOUR):
• Initial Position: {lats[0]:.1f}°N, {lons[0]:.1f}°E
• 24-hour Position: {lats[min(4, len(lats)-1)]:.1f}°N, {lons[min(4, len(lons)-1)]:.1f}°E
• 48-hour Position: {lats[min(8, len(lats)-1)]:.1f}°N, {lons[min(8, len(lons)-1)]:.1f}°E
• 72-hour Position: {lats[-1]:.1f}°N, {lons[-1]:.1f}°E

INTENSITY EVOLUTION:
• Current: {intensities[0]:.0f} kt ({categories[0]})
• 24-hour: {intensities[min(4, len(intensities)-1)]:.0f} kt ({categories[min(4, len(categories)-1)]})
• 48-hour: {intensities[min(8, len(intensities)-1)]:.0f} kt ({categories[min(8, len(categories)-1)]})
• 72-hour: {intensities[-1]:.0f} kt ({categories[-1]})

CONFIDENCE LEVELS:

@@ -1760,7 +1936,11 @@ MODEL: {prediction_results['model_info']}
        return fig, forecast_text.strip()

    except Exception as e:
        error_msg = f"Error creating route visualization: {str(e)}"
        print(error_msg)
        import traceback
        traceback.print_exc()
        return None, error_msg
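The marker loop above relies on an `enhanced_color_map` dict and a `rgb_string_to_hex` helper that are defined elsewhere in `app.py` and are not part of this diff. As a point of reference only, a minimal sketch of what such a converter could look like is shown below; the regex-based implementation is an assumption based on the call site and the "Convert rgb to regular color format" comment, not the Space's actual helper.

    import re

    def rgb_string_to_hex(rgb_string):
        """Hypothetical sketch: convert a Plotly-style 'rgb(r, g, b)' string to '#rrggbb'."""
        match = re.match(r'rgb\((\d+),\s*(\d+),\s*(\d+)\)', rgb_string)
        if not match:
            return rgb_string  # already hex or an unrecognised format; pass through unchanged
        r, g, b = (int(v) for v in match.groups())
        return f'#{r:02x}{g:02x}{b:02x}'

    # Example: rgb_string_to_hex('rgb(128,128,128)') returns '#808080'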

# -----------------------------
# Regression Functions (Original)