euler314 committed on
Commit
ce399c7
·
verified ·
1 Parent(s): cb19cca

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +116 -47
app.py CHANGED
@@ -41,7 +41,7 @@ import tropycal.tracks as tracks
41
  # Configuration and Setup
42
  # -----------------------------
43
  logging.basicConfig(
44
- level=logging.INFO, # Change to DEBUG for more details
45
  format='%(asctime)s - %(levelname)s - %(message)s'
46
  )
47
 
@@ -53,9 +53,9 @@ DATA_PATH = args.data_path
53
  # Data paths
54
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
55
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
56
- MERGED_DATA_CSV = os.path.join(DATA_PATH, 'merged_typhoon_era5_data.csv') # Used by other analyses
57
 
58
- # IBTrACS settings (used only for typhoon option updates)
59
  BASIN_FILES = {
60
  'EP': 'ibtracs.EP.list.v04r01.csv',
61
  'NA': 'ibtracs.NA.list.v04r01.csv',
@@ -137,6 +137,7 @@ def convert_oni_ascii_to_csv(input_file, output_file):
137
  year = str(int(year)-1)
138
  data[year][month-1] = anom
139
  with open(output_file, 'w', newline='') as f:
 
140
  writer = csv.writer(f)
141
  writer.writerow(['Year','Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'])
142
  for year in sorted(data.keys()):
@@ -155,11 +156,18 @@ def update_oni_data():
155
  os.remove(temp_file)
156
 
157
  def load_data(oni_path, typhoon_path):
158
- oni_data = pd.read_csv(oni_path)
159
- typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
160
- typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
161
- typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
162
- return oni_data, typhoon_data
 
 
 
 
 
 
 
163
 
164
  def process_oni_data(oni_data):
165
  oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
@@ -366,7 +374,7 @@ def generate_main_analysis(start_year, start_month, end_year, end_month, enso_ph
366
  filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
367
  filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
368
  if enso_phase != 'all':
369
- filtered_data = filtered_data[filtered_data['ENSO_Phase']==enso_phase.capitalize()]
370
  tracks_fig = generate_typhoon_tracks(filtered_data, typhoon_search)
371
  wind_scatter = generate_wind_oni_scatter(filtered_data, typhoon_search)
372
  pressure_scatter = generate_pressure_oni_scatter(filtered_data, typhoon_search)
@@ -379,7 +387,7 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
379
  filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
380
  filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
381
  if enso_phase != 'all':
382
- filtered_data = filtered_data[filtered_data['ENSO_Phase']==enso_phase.capitalize()]
383
  unique_storms = filtered_data['SID'].unique()
384
  count = len(unique_storms)
385
  fig = go.Figure()
@@ -468,10 +476,10 @@ def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
468
  return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
469
  return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
470
 
471
- # ------------- Updated TSNE Cluster Function with Mean Path and Stats -------------
472
  def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
473
  try:
474
- # Use raw typhoon data merged with ONI info so each storm has multiple points.
475
  raw_data = typhoon_data.copy()
476
  raw_data['Year'] = raw_data['ISO_TIME'].dt.year
477
  raw_data['Month'] = raw_data['ISO_TIME'].dt.strftime('%m')
@@ -489,7 +497,7 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
489
  merged_raw = merged_raw[merged_raw['ENSO_Phase'] == enso_value.capitalize()]
490
  logging.info(f"Total points after ENSO filtering: {merged_raw.shape[0]}")
491
 
492
- # Apply regional filter for Western Pacific (adjust as needed)
493
  wp_data = merged_raw[(merged_raw['LON'] >= 100) & (merged_raw['LON'] <= 180) &
494
  (merged_raw['LAT'] >= 0) & (merged_raw['LAT'] <= 40)]
495
  logging.info(f"Total points after WP regional filtering: {wp_data.shape[0]}")
@@ -497,7 +505,7 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
497
  logging.info("WP regional filter returned no data; using all filtered data.")
498
  wp_data = merged_raw
499
 
500
- # Group by SID so that each storm has multiple points
501
  all_storms_data = []
502
  for sid, group in wp_data.groupby('SID'):
503
  group = group.sort_values('ISO_TIME')
@@ -506,16 +514,21 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
506
  lons = group['LON'].astype(float).values
507
  if len(lons) < 2:
508
  continue
509
- all_storms_data.append((sid, lons, lats, times))
 
 
 
510
  logging.info(f"Storms available for TSNE after grouping: {len(all_storms_data)}")
511
  if not all_storms_data:
512
  return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms for clustering."
513
 
514
- # Interpolate each storm route to a common length
515
  max_length = max(len(item[1]) for item in all_storms_data)
516
  route_vectors = []
 
 
517
  storm_ids = []
518
- for sid, lons, lats, times in all_storms_data:
519
  t = np.linspace(0, 1, len(lons))
520
  t_new = np.linspace(0, 1, max_length)
521
  try:
@@ -529,22 +542,61 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
529
  continue
530
  route_vectors.append(route_vector)
531
  storm_ids.append(sid)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
532
  logging.info(f"Storms with valid route vectors: {len(route_vectors)}")
533
  if len(route_vectors) == 0:
534
  return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
535
 
536
  route_vectors = np.array(route_vectors)
 
 
 
 
537
  tsne = TSNE(n_components=2, random_state=42, verbose=1)
538
  tsne_results = tsne.fit_transform(route_vectors)
539
 
540
- dbscan = DBSCAN(eps=5, min_samples=3)
541
- labels = dbscan.fit_predict(tsne_results)
542
- unique_labels = sorted(set(labels) - {-1})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
543
 
 
544
  fig_tsne = go.Figure()
545
  colors = px.colors.qualitative.Safe
 
546
  for i, label in enumerate(unique_labels):
547
- indices = np.where(labels == label)[0]
548
  fig_tsne.add_trace(go.Scatter(
549
  x=tsne_results[indices, 0],
550
  y=tsne_results[indices, 1],
@@ -552,7 +604,7 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
552
  marker=dict(color=colors[i % len(colors)]),
553
  name=f"Cluster {label}"
554
  ))
555
- noise_indices = np.where(labels == -1)[0]
556
  if len(noise_indices) > 0:
557
  fig_tsne.add_trace(go.Scatter(
558
  x=tsne_results[noise_indices, 0],
@@ -567,12 +619,11 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
567
  yaxis_title="t-SNE Dim 2"
568
  )
569
 
570
- # Compute mean route for each cluster and cluster stats (average wind and pressure)
571
  fig_routes = go.Figure()
572
- fig_stats = make_subplots(rows=2, cols=1, shared_xaxes=True,
573
- subplot_titles=("Average Wind Speed (knots)", "Average MSLP (hPa)"))
574
  for i, label in enumerate(unique_labels):
575
- indices = np.where(labels == label)[0]
576
  cluster_ids = [storm_ids[j] for j in indices]
577
  cluster_vectors = route_vectors[indices, :]
578
  mean_vector = np.mean(cluster_vectors, axis=0)
@@ -586,35 +637,51 @@ def update_route_clusters(start_year, start_month, end_year, end_month, enso_val
586
  line=dict(width=4, color=colors[i % len(colors)]),
587
  name=f"Cluster {label} Mean Route"
588
  ))
589
- # Get cluster data from raw filtered data (wp_data)
590
- cluster_data = wp_data[wp_data['SID'].isin(cluster_ids)]
591
- avg_wind = cluster_data['USA_WIND'].mean() if 'USA_WIND' in cluster_data.columns else np.nan
592
- avg_pres = cluster_data['USA_PRES'].mean() if 'USA_PRES' in cluster_data.columns else np.nan
593
- # Plot horizontal lines for cluster stats at index i
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
594
  fig_stats.add_trace(go.Scatter(
595
- x=[i, i],
596
- y=[avg_wind, avg_wind],
597
- mode='lines+markers',
598
  line=dict(width=2, color=colors[i % len(colors)]),
599
- name=f"Cluster {label} Avg Wind"
600
  ), row=1, col=1)
601
  fig_stats.add_trace(go.Scatter(
602
- x=[i, i],
603
- y=[avg_pres, avg_pres],
604
- mode='lines+markers',
605
  line=dict(width=2, color=colors[i % len(colors)]),
606
- name=f"Cluster {label} Avg Pres"
607
  ), row=2, col=1)
608
-
609
  fig_stats.update_layout(
610
- title="Cluster Statistics",
611
- xaxis_title="Cluster Index",
612
- yaxis_title="Average Wind Speed (knots)",
613
- xaxis2_title="Cluster Index",
614
- yaxis2_title="Average MSLP (hPa)",
615
  showlegend=True
616
  )
617
- info = "TSNE clustering complete."
 
618
  return fig_tsne, fig_routes, fig_stats, info
619
  except Exception as e:
620
  logging.error(f"Error in TSNE clustering: {e}")
@@ -784,7 +851,9 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
784
  - **Pressure Analysis**: Analyze pressure vs ONI relationships.
785
  - **Longitude Analysis**: Study typhoon generation longitude vs ONI.
786
  - **Path Animation**: View animated storm tracks on a free stock world map (centered at 180°) with a dynamic sidebar and persistent legend.
787
- - **TSNE Cluster**: Perform t-SNE clustering on WP storm routes using raw merged typhoon+ONI data with detailed error management. Mean storm routes and cluster-level average wind/pressure are computed.
 
 
788
  """)
789
 
790
  with gr.Tab("Track Visualization"):
 
41
  # Configuration and Setup
42
  # -----------------------------
43
  logging.basicConfig(
44
+ level=logging.INFO, # Use DEBUG for more details
45
  format='%(asctime)s - %(levelname)s - %(message)s'
46
  )
47
 
 
53
  # Data paths
54
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
55
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
56
+ MERGED_DATA_CSV = os.path.join(DATA_PATH, 'merged_typhoon_era5_data.csv') # used in other tabs
57
 
58
+ # IBTrACS settings (only used for updating typhoon options)
59
  BASIN_FILES = {
60
  'EP': 'ibtracs.EP.list.v04r01.csv',
61
  'NA': 'ibtracs.NA.list.v04r01.csv',
 
137
  year = str(int(year)-1)
138
  data[year][month-1] = anom
139
  with open(output_file, 'w', newline='') as f:
140
+ writer = pd.ExcelWriter(f)
141
  writer = csv.writer(f)
142
  writer.writerow(['Year','Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'])
143
  for year in sorted(data.keys()):
 
156
  os.remove(temp_file)
157
 
158
  def load_data(oni_path, typhoon_path):
159
+ if not os.path.exists(typhoon_path):
160
+ logging.error(f"Typhoon data file not found: {typhoon_path}")
161
+ return pd.DataFrame(), pd.DataFrame()
162
+ try:
163
+ oni_data = pd.read_csv(oni_path)
164
+ typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
165
+ typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
166
+ typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
167
+ return oni_data, typhoon_data
168
+ except Exception as e:
169
+ logging.error(f"Error loading data: {e}")
170
+ return pd.DataFrame(), pd.DataFrame()
171
 
172
  def process_oni_data(oni_data):
173
  oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
 
374
  filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
375
  filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
376
  if enso_phase != 'all':
377
+ filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
378
  tracks_fig = generate_typhoon_tracks(filtered_data, typhoon_search)
379
  wind_scatter = generate_wind_oni_scatter(filtered_data, typhoon_search)
380
  pressure_scatter = generate_pressure_oni_scatter(filtered_data, typhoon_search)
 
387
  filtered_data = merged_data[(merged_data['ISO_TIME']>=start_date) & (merged_data['ISO_TIME']<=end_date)].copy()
388
  filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
389
  if enso_phase != 'all':
390
+ filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
391
  unique_storms = filtered_data['SID'].unique()
392
  count = len(unique_storms)
393
  fig = go.Figure()
 
476
  return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
477
  return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
478
 
479
+ # ------------- Updated TSNE Cluster Function with Mean Curves -------------
480
  def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
481
  try:
482
+ # Merge raw typhoon data with ONI so that each storm has multiple points.
483
  raw_data = typhoon_data.copy()
484
  raw_data['Year'] = raw_data['ISO_TIME'].dt.year
485
  raw_data['Month'] = raw_data['ISO_TIME'].dt.strftime('%m')
 
497
  merged_raw = merged_raw[merged_raw['ENSO_Phase'] == enso_value.capitalize()]
498
  logging.info(f"Total points after ENSO filtering: {merged_raw.shape[0]}")
499
 
500
+ # Apply regional filter for Western Pacific (adjust boundaries as needed)
501
  wp_data = merged_raw[(merged_raw['LON'] >= 100) & (merged_raw['LON'] <= 180) &
502
  (merged_raw['LAT'] >= 0) & (merged_raw['LAT'] <= 40)]
503
  logging.info(f"Total points after WP regional filtering: {wp_data.shape[0]}")
 
505
  logging.info("WP regional filter returned no data; using all filtered data.")
506
  wp_data = merged_raw
507
 
508
+ # Group by storm ID (SID); each group must have at least 2 observations
509
  all_storms_data = []
510
  for sid, group in wp_data.groupby('SID'):
511
  group = group.sort_values('ISO_TIME')
 
514
  lons = group['LON'].astype(float).values
515
  if len(lons) < 2:
516
  continue
517
+ # Also store wind and pressure for interpolation
518
+ wind = group['USA_WIND'].astype(float).values if 'USA_WIND' in group.columns else None
519
+ pres = group['USA_PRES'].astype(float).values if 'USA_PRES' in group.columns else None
520
+ all_storms_data.append((sid, lons, lats, times, wind, pres))
521
  logging.info(f"Storms available for TSNE after grouping: {len(all_storms_data)}")
522
  if not all_storms_data:
523
  return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms for clustering."
524
 
525
+ # Interpolate each storm's route (and wind/pressure) to a common length
526
  max_length = max(len(item[1]) for item in all_storms_data)
527
  route_vectors = []
528
+ wind_curves = []
529
+ pres_curves = []
530
  storm_ids = []
531
+ for sid, lons, lats, times, wind, pres in all_storms_data:
532
  t = np.linspace(0, 1, len(lons))
533
  t_new = np.linspace(0, 1, max_length)
534
  try:
 
542
  continue
543
  route_vectors.append(route_vector)
544
  storm_ids.append(sid)
545
+ # Interpolate wind and pressure if available; otherwise, fill with NaN
546
+ if wind is not None and len(wind) >= 2:
547
+ try:
548
+ wind_interp = interp1d(t, wind, kind='linear', fill_value='extrapolate')(t_new)
549
+ except Exception as ex:
550
+ logging.error(f"Wind interpolation error for storm {sid}: {ex}")
551
+ wind_interp = np.full(max_length, np.nan)
552
+ else:
553
+ wind_interp = np.full(max_length, np.nan)
554
+ if pres is not None and len(pres) >= 2:
555
+ try:
556
+ pres_interp = interp1d(t, pres, kind='linear', fill_value='extrapolate')(t_new)
557
+ except Exception as ex:
558
+ logging.error(f"Pressure interpolation error for storm {sid}: {ex}")
559
+ pres_interp = np.full(max_length, np.nan)
560
+ else:
561
+ pres_interp = np.full(max_length, np.nan)
562
+ wind_curves.append(wind_interp)
563
+ pres_curves.append(pres_interp)
564
  logging.info(f"Storms with valid route vectors: {len(route_vectors)}")
565
  if len(route_vectors) == 0:
566
  return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
567
 
568
  route_vectors = np.array(route_vectors)
569
+ wind_curves = np.array(wind_curves)
570
+ pres_curves = np.array(pres_curves)
571
+
572
+ # Run TSNE on route vectors
573
  tsne = TSNE(n_components=2, random_state=42, verbose=1)
574
  tsne_results = tsne.fit_transform(route_vectors)
575
 
576
+ # Dynamic DBSCAN: choose eps so that we have roughly 5 to 20 clusters if possible
577
+ selected_labels = None
578
+ selected_eps = None
579
+ for eps in np.linspace(1.0, 10.0, 91):
580
+ dbscan = DBSCAN(eps=eps, min_samples=3)
581
+ labels = dbscan.fit_predict(tsne_results)
582
+ clusters = set(labels) - {-1}
583
+ num_clusters = len(clusters)
584
+ if 5 <= num_clusters <= 20:
585
+ selected_labels = labels
586
+ selected_eps = eps
587
+ break
588
+ if selected_labels is None:
589
+ selected_eps = 5.0
590
+ dbscan = DBSCAN(eps=selected_eps, min_samples=3)
591
+ selected_labels = dbscan.fit_predict(tsne_results)
592
+ logging.info(f"Selected DBSCAN eps: {selected_eps:.2f} yielding {len(set(selected_labels) - {-1})} clusters.")
593
 
594
+ # TSNE scatter plot
595
  fig_tsne = go.Figure()
596
  colors = px.colors.qualitative.Safe
597
+ unique_labels = sorted(set(selected_labels) - {-1})
598
  for i, label in enumerate(unique_labels):
599
+ indices = np.where(selected_labels == label)[0]
600
  fig_tsne.add_trace(go.Scatter(
601
  x=tsne_results[indices, 0],
602
  y=tsne_results[indices, 1],
 
604
  marker=dict(color=colors[i % len(colors)]),
605
  name=f"Cluster {label}"
606
  ))
607
+ noise_indices = np.where(selected_labels == -1)[0]
608
  if len(noise_indices) > 0:
609
  fig_tsne.add_trace(go.Scatter(
610
  x=tsne_results[noise_indices, 0],
 
619
  yaxis_title="t-SNE Dim 2"
620
  )
621
 
622
+ # For each cluster, compute mean route, mean wind curve, and mean pressure curve.
623
  fig_routes = go.Figure()
624
+ cluster_stats = [] # To hold mean curves for wind and pressure
 
625
  for i, label in enumerate(unique_labels):
626
+ indices = np.where(selected_labels == label)[0]
627
  cluster_ids = [storm_ids[j] for j in indices]
628
  cluster_vectors = route_vectors[indices, :]
629
  mean_vector = np.mean(cluster_vectors, axis=0)
 
637
  line=dict(width=4, color=colors[i % len(colors)]),
638
  name=f"Cluster {label} Mean Route"
639
  ))
640
+ # Get storms in this cluster from wp_data by SID
641
+ cluster_raw = wp_data[wp_data['SID'].isin(cluster_ids)]
642
+ # For each storm in the cluster, we already interpolated wind_curves and pres_curves.
643
+ cluster_winds = wind_curves[indices, :] # shape: (#storms, max_length)
644
+ cluster_pres = pres_curves[indices, :] # shape: (#storms, max_length)
645
+ # Compute mean curves (if available)
646
+ if cluster_winds.size > 0:
647
+ mean_wind_curve = np.nanmean(cluster_winds, axis=0)
648
+ else:
649
+ mean_wind_curve = np.full(max_length, np.nan)
650
+ if cluster_pres.size > 0:
651
+ mean_pres_curve = np.nanmean(cluster_pres, axis=0)
652
+ else:
653
+ mean_pres_curve = np.full(max_length, np.nan)
654
+ cluster_stats.append((label, mean_wind_curve, mean_pres_curve))
655
+
656
+ # Create cluster stats plot with curves vs normalized route index (0 to 1)
657
+ x_axis = np.linspace(0, 1, max_length)
658
+ fig_stats = make_subplots(rows=2, cols=1, shared_xaxes=True,
659
+ subplot_titles=("Mean Wind Speed (knots)", "Mean MSLP (hPa)"))
660
+ for i, (label, wind_curve, pres_curve) in enumerate(cluster_stats):
661
  fig_stats.add_trace(go.Scatter(
662
+ x=x_axis,
663
+ y=wind_curve,
664
+ mode='lines',
665
  line=dict(width=2, color=colors[i % len(colors)]),
666
+ name=f"Cluster {label} Mean Wind"
667
  ), row=1, col=1)
668
  fig_stats.add_trace(go.Scatter(
669
+ x=x_axis,
670
+ y=pres_curve,
671
+ mode='lines',
672
  line=dict(width=2, color=colors[i % len(colors)]),
673
+ name=f"Cluster {label} Mean MSLP"
674
  ), row=2, col=1)
 
675
  fig_stats.update_layout(
676
+ title="Cluster Mean Curves",
677
+ xaxis_title="Normalized Route Index",
678
+ yaxis_title="Mean Wind Speed (knots)",
679
+ xaxis2_title="Normalized Route Index",
680
+ yaxis2_title="Mean MSLP (hPa)",
681
  showlegend=True
682
  )
683
+
684
+ info = f"TSNE clustering complete. Selected eps: {selected_eps:.2f}. Clusters: {len(unique_labels)}."
685
  return fig_tsne, fig_routes, fig_stats, info
686
  except Exception as e:
687
  logging.error(f"Error in TSNE clustering: {e}")
 
851
  - **Pressure Analysis**: Analyze pressure vs ONI relationships.
852
  - **Longitude Analysis**: Study typhoon generation longitude vs ONI.
853
  - **Path Animation**: View animated storm tracks on a free stock world map (centered at 180°) with a dynamic sidebar and persistent legend.
854
+ - **TSNE Cluster**: Perform t-SNE clustering on WP storm routes using raw merged typhoon+ONI data.
855
+ For each cluster, a mean route is computed and, importantly, mean wind and MSLP curves (plotted versus normalized route index)
856
+ are computed from start to end.
857
  """)
858
 
859
  with gr.Tab("Track Visualization"):