euler314 committed
Commit f7bff1c · verified · 1 parent: 2126341

Update app.py

Files changed (1)
  1. app.py +509 -102
app.py CHANGED
@@ -105,6 +105,7 @@ CACHE_EXPIRY_DAYS = 1
105
  # -----------------------------
106
  # ENHANCED: Color Maps and Standards with TD Support
107
  # -----------------------------
 
108
  enhanced_color_map = {
109
  'Unknown': 'rgb(200, 200, 200)',
110
  'Tropical Depression': 'rgb(128, 128, 128)', # NEW: Gray for TD
@@ -116,6 +117,43 @@ enhanced_color_map = {
116
  'C5 Super Typhoon': 'rgb(255, 0, 0)'
117
  }
118
 
119
  # Original color map for backward compatibility
120
  color_map = {
121
  'C5 Super Typhoon': 'rgb(255, 0, 0)',
@@ -688,11 +726,11 @@ def classify_enso_phases(oni_value):
688
  return 'Neutral'
689
 
690
  # -----------------------------
691
- # NEW: Advanced ML Features
692
  # -----------------------------
693
 
694
  def extract_storm_features(typhoon_data):
695
- """Extract features for clustering analysis"""
696
  # Group by storm ID to get storm-level features
697
  storm_features = typhoon_data.groupby('SID').agg({
698
  'USA_WIND': ['max', 'mean', 'std'],
@@ -715,6 +753,47 @@ def extract_storm_features(typhoon_data):
715
  genesis_data.columns = ['genesis_lat', 'genesis_lon', 'genesis_intensity']
716
  storm_features = storm_features.merge(genesis_data, on='SID', how='left')
717
 
718
  return storm_features
719
 
720
  def perform_dimensionality_reduction(storm_features, method='umap', n_components=2):
@@ -754,10 +833,10 @@ def perform_dimensionality_reduction(storm_features, method='umap', n_components
754
 
755
  return embedding, feature_cols, scaler
756
 
757
- def cluster_storms(embedding, method='dbscan'):
758
  """Cluster storms based on their embedding"""
759
  if method.lower() == 'dbscan':
760
- clusterer = DBSCAN(eps=0.5, min_samples=5)
761
  elif method.lower() == 'kmeans':
762
  clusterer = KMeans(n_clusters=5, random_state=42)
763
  else:
@@ -766,8 +845,8 @@ def cluster_storms(embedding, method='dbscan'):
766
  clusters = clusterer.fit_predict(embedding)
767
  return clusters
768
 
769
- def create_clustering_visualization(storm_features, typhoon_data, method='umap'):
770
- """Create interactive clustering visualization"""
771
  try:
772
  # Perform dimensionality reduction
773
  embedding, feature_cols, scaler = perform_dimensionality_reduction(storm_features, method)
@@ -785,37 +864,179 @@ def create_clustering_visualization(storm_features, typhoon_data, method='umap')
785
  storm_info = typhoon_data.groupby('SID').first()[['NAME', 'SEASON']].reset_index()
786
  storm_features_viz = storm_features_viz.merge(storm_info, on='SID', how='left')
787
 
788
- # Create interactive plot
789
- fig = px.scatter(
790
- storm_features_viz,
791
- x='dim1',
792
- y='dim2',
793
- color='cluster',
794
- hover_data=['NAME', 'SEASON', 'USA_WIND_max', 'USA_PRES_min'],
795
- title=f'Storm Clustering using {method.upper()}',
796
- labels={
797
- 'dim1': f'{method.upper()} Dimension 1',
798
- 'dim2': f'{method.upper()} Dimension 2',
799
- 'cluster': 'Cluster'
800
- }
801
- )
802
 
803
- # Add cluster statistics
804
  cluster_stats = storm_features_viz.groupby('cluster').agg({
805
- 'USA_WIND_max': 'mean',
806
- 'USA_PRES_min': 'mean',
807
- 'track_length': 'mean',
808
  'SID': 'count'
809
  }).round(2)
810
 
811
- stats_text = "Cluster Statistics:\n"
812
- for cluster, stats in cluster_stats.iterrows():
813
- if cluster != -1: # Skip noise points in DBSCAN
814
- stats_text += f"Cluster {cluster}: {stats['SID']} storms, avg max wind: {stats['USA_WIND_max']} kt\n"
815
 
816
  return fig, stats_text, storm_features_viz
 
817
  except Exception as e:
818
- return None, f"Error in clustering: {str(e)}", None
819
 
820
  # -----------------------------
821
  # NEW: Optional CNN Implementation
@@ -1134,33 +1355,33 @@ def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_ph
1134
  return fig, slopes_text, regression
1135
 
1136
  def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
1137
- """Categorize typhoon by standard with enhanced TD support"""
1138
  if pd.isna(wind_speed):
1139
  return 'Tropical Depression', '#808080'
1140
 
1141
  if standard=='taiwan':
1142
  wind_speed_ms = wind_speed * 0.514444
1143
  if wind_speed_ms >= 51.0:
1144
- return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['hex']
1145
  elif wind_speed_ms >= 33.7:
1146
- return 'Medium Typhoon', taiwan_standard['Medium Typhoon']['hex']
1147
  elif wind_speed_ms >= 17.2:
1148
- return 'Mild Typhoon', taiwan_standard['Mild Typhoon']['hex']
1149
- return 'Tropical Depression', taiwan_standard['Tropical Depression']['hex']
1150
  else:
1151
  if wind_speed >= 137:
1152
- return 'C5 Super Typhoon', atlantic_standard['C5 Super Typhoon']['hex']
1153
  elif wind_speed >= 113:
1154
- return 'C4 Very Strong Typhoon', atlantic_standard['C4 Very Strong Typhoon']['hex']
1155
  elif wind_speed >= 96:
1156
- return 'C3 Strong Typhoon', atlantic_standard['C3 Strong Typhoon']['hex']
1157
  elif wind_speed >= 83:
1158
- return 'C2 Typhoon', atlantic_standard['C2 Typhoon']['hex']
1159
  elif wind_speed >= 64:
1160
- return 'C1 Typhoon', atlantic_standard['C1 Typhoon']['hex']
1161
  elif wind_speed >= 34:
1162
- return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
1163
- return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
1164
 
1165
  # -----------------------------
1166
  # ENHANCED: Animation Functions
@@ -1234,7 +1455,7 @@ def update_typhoon_options_enhanced(year, basin):
1234
  return gr.update(choices=["Error loading storms"], value=None)
1235
 
1236
  def generate_enhanced_track_video(year, typhoon_selection, standard):
1237
- """Enhanced track video generation with TD support and 2025 compatibility"""
1238
  if not typhoon_selection or typhoon_selection == "No storms found":
1239
  return None
1240
 
@@ -1245,6 +1466,7 @@ def generate_enhanced_track_video(year, typhoon_selection, standard):
1245
  # Get storm data
1246
  storm_df = typhoon_data[typhoon_data['SID'] == sid].copy()
1247
  if storm_df.empty:
 
1248
  return None
1249
 
1250
  # Sort by time
@@ -1264,6 +1486,8 @@ def generate_enhanced_track_video(year, typhoon_selection, standard):
1264
  storm_name = storm_df['NAME'].iloc[0] if pd.notna(storm_df['NAME'].iloc[0]) else "UNNAMED"
1265
  season = storm_df['SEASON'].iloc[0] if 'SEASON' in storm_df.columns else year
1266
 
 
 
1267
  # Create figure with enhanced map
1268
  fig, ax = plt.subplots(figsize=(14, 8), subplot_kw={'projection': ccrs.PlateCarree()})
1269
 
@@ -1297,48 +1521,60 @@ def generate_enhanced_track_video(year, typhoon_selection, standard):
1297
  fontsize=11, verticalalignment='top',
1298
  bbox=dict(boxstyle="round,pad=0.5", facecolor='white', alpha=0.9))
1299
 
1300
- # Color legend with TD support
1301
  legend_elements = []
1302
- for category, color in enhanced_color_map.items():
1303
- legend_elements.append(plt.Line2D([0], [0], marker='o', color='w',
1304
- markerfacecolor=color, markersize=8, label=category))
 
 
 
1305
 
1306
  ax.legend(handles=legend_elements, loc='upper right', fontsize=9)
1307
 
1308
  def animate(frame):
1309
- if frame >= len(lats):
1310
  return line, point, info_box
1311
-
1312
- # Update track line
1313
- line.set_data(lons[:frame+1], lats[:frame+1])
1314
-
1315
- # Update current position
1316
- current_wind = winds[frame]
1317
- category = categorize_typhoon_enhanced(current_wind)
1318
- color = enhanced_color_map[category]
1319
-
1320
- point.set_data([lons[frame]], [lats[frame]])
1321
- point.set_color(color)
1322
- point.set_markersize(8 + current_wind/10) # Size based on intensity
1323
-
1324
- # Enhanced info display
1325
- if 'ISO_TIME' in storm_df.columns:
1326
- current_time = storm_df.iloc[frame]['ISO_TIME']
1327
- time_str = current_time.strftime('%Y-%m-%d %H:%M UTC') if pd.notna(current_time) else 'Unknown'
1328
- else:
1329
- time_str = f"Step {frame+1}"
1330
-
1331
- info_text = (
1332
- f"Storm: {storm_name}\n"
1333
- f"Time: {time_str}\n"
1334
- f"Position: {lats[frame]:.1f}°N, {lons[frame]:.1f}°E\n"
1335
- f"Max Wind: {current_wind:.0f} kt\n"
1336
- f"Category: {category}\n"
1337
- f"Frame: {frame+1}/{len(lats)}"
1338
- )
1339
- info_box.set_text(info_text)
1340
-
1341
- return line, point, info_box
1342
 
1343
  # Create animation
1344
  anim = animation.FuncAnimation(
@@ -1356,18 +1592,22 @@ def generate_enhanced_track_video(year, typhoon_selection, standard):
1356
  extra_args=['-pix_fmt', 'yuv420p'] # Better compatibility
1357
  )
1358
 
 
1359
  anim.save(temp_file.name, writer=writer, dpi=100)
1360
  plt.close(fig)
1361
 
 
1362
  return temp_file.name
1363
 
1364
  except Exception as e:
1365
- print(f"Error generating video: {e}")
 
 
1366
  return None
1367
 
1368
- # Simplified wrapper for backward compatibility
1369
  def simplified_track_video(year, basin, typhoon, standard):
1370
- """Simplified track video function"""
1371
  if not typhoon:
1372
  return None
1373
  return generate_enhanced_track_video(year, typhoon, standard)
@@ -1436,7 +1676,7 @@ def create_interface():
1436
  This dashboard provides comprehensive analysis of typhoon data in relation to ENSO phases with advanced machine learning capabilities.
1437
 
1438
  ### πŸ†• Enhanced Features:
1439
- - **πŸ“ˆ Advanced ML Clustering**: UMAP/t-SNE storm pattern analysis
1440
  - **πŸ€– Optional CNN Predictions**: Deep learning intensity forecasting
1441
  - **πŸŒ€ Complete TD Support**: Now includes Tropical Depressions (< 34 kt)
1442
  - **πŸ“… 2025 Data Ready**: Real-time compatibility with current year data
@@ -1455,8 +1695,9 @@ def create_interface():
1455
  - **Platform Compatibility**: βœ… Optimized for Hugging Face Spaces
1456
  """)
1457
 
1458
- with gr.Tab("πŸ” Advanced ML Clustering"):
1459
- gr.Markdown("## Storm Pattern Analysis using UMAP/t-SNE")
 
1460
 
1461
  with gr.Row():
1462
  reduction_method = gr.Dropdown(
@@ -1464,40 +1705,50 @@ def create_interface():
1464
  value='UMAP' if UMAP_AVAILABLE else 't-SNE',
1465
  label="Dimensionality Reduction Method"
1466
  )
1467
- cluster_method = gr.Dropdown(
1468
- choices=['DBSCAN', 'K-Means'],
1469
- value='DBSCAN',
1470
- label="Clustering Method"
1471
  )
1472
 
1473
- analyze_clusters_btn = gr.Button("🎯 Analyze Storm Clusters", variant="primary")
1474
 
1475
  with gr.Row():
1476
- cluster_plot = gr.Plot(label="Storm Clustering Visualization")
1477
- cluster_stats = gr.Textbox(label="Cluster Statistics", lines=10)
1478
 
1479
- def run_clustering_analysis(method):
 
 
 
1480
  try:
1481
  # Extract features for clustering
1482
  storm_features = extract_storm_features(typhoon_data)
1483
- fig, stats, _ = create_clustering_visualization(storm_features, typhoon_data, method.lower())
1484
  return fig, stats
1485
  except Exception as e:
1486
- return None, f"Error: {str(e)}"
 
 
1487
 
1488
  analyze_clusters_btn.click(
1489
- fn=run_clustering_analysis,
1490
- inputs=[reduction_method],
1491
  outputs=[cluster_plot, cluster_stats]
1492
  )
1493
 
1494
  gr.Markdown("""
1495
- ### ℹ️ About Storm Clustering:
1496
- - **UMAP**: Faster and preserves global structure better
1497
- - **t-SNE**: Good for local neighborhood preservation
1498
- - **PCA**: Linear dimensionality reduction (fallback)
1499
- - **DBSCAN**: Density-based clustering, finds natural groupings
1500
- - **K-Means**: Partitions storms into K predefined clusters
1501
  """)
1502
 
1503
  with gr.Tab("πŸ€– Intensity Prediction"):
@@ -1666,6 +1917,125 @@ def create_interface():
1666
  - **⚑ Optimized Export**: Fast rendering with web-compatible video formats
1667
  """)
1668
 
1669
  return demo
1670
  except Exception as e:
1671
  logging.error(f"Error creating Gradio interface: {e}")
@@ -1673,8 +2043,45 @@ def create_interface():
1673
  with gr.Blocks() as demo:
1674
  gr.Markdown("# πŸŒͺ️ Enhanced Typhoon Analysis Platform")
1675
  gr.Markdown("**Error**: Could not load full interface. Please check logs.")
 
1676
  return demo
1677
 
1678
  # Create and launch the interface
1679
  demo = create_interface()
1680
 
 
105
  # -----------------------------
106
  # ENHANCED: Color Maps and Standards with TD Support
107
  # -----------------------------
108
+ # Enhanced color mapping with TD support (for Plotly)
109
  enhanced_color_map = {
110
  'Unknown': 'rgb(200, 200, 200)',
111
  'Tropical Depression': 'rgb(128, 128, 128)', # NEW: Gray for TD
 
117
  'C5 Super Typhoon': 'rgb(255, 0, 0)'
118
  }
119
 
120
+ # Matplotlib-compatible color mapping (hex colors)
121
+ matplotlib_color_map = {
122
+ 'Unknown': '#C8C8C8',
123
+ 'Tropical Depression': '#808080', # Gray for TD
124
+ 'Tropical Storm': '#0000FF', # Blue
125
+ 'C1 Typhoon': '#00FFFF', # Cyan
126
+ 'C2 Typhoon': '#00FF00', # Green
127
+ 'C3 Strong Typhoon': '#FFFF00', # Yellow
128
+ 'C4 Very Strong Typhoon': '#FFA500', # Orange
129
+ 'C5 Super Typhoon': '#FF0000' # Red
130
+ }
131
+
132
+ def rgb_string_to_hex(rgb_string):
133
+ """Convert 'rgb(r,g,b)' string to hex color for matplotlib"""
134
+ try:
135
+ # Extract numbers from 'rgb(r,g,b)' format
136
+ import re
137
+ numbers = re.findall(r'\d+', rgb_string)
138
+ if len(numbers) == 3:
139
+ r, g, b = map(int, numbers)
140
+ return f'#{r:02x}{g:02x}{b:02x}'
141
+ else:
142
+ return '#808080' # Default gray
143
+ except:
144
+ return '#808080' # Default gray
145
+
146
+ def get_matplotlib_color(category):
147
+ """Get matplotlib-compatible color for a storm category"""
148
+ return matplotlib_color_map.get(category, '#808080')
149
+
150
+ # Cluster colors for route visualization
151
+ CLUSTER_COLORS = [
152
+ '#FF6B6B', '#4ECDC4', '#45B7D1', '#96CEB4', '#FFEAA7',
153
+ '#DDA0DD', '#98D8C8', '#F7DC6F', '#BB8FCE', '#85C1E9',
154
+ '#F8C471', '#82E0AA', '#F1948A', '#85C1E9', '#D2B4DE'
155
+ ]
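Editor's note (not part of this commit): the Plotly-style rgb(...) entries in enhanced_color_map can be fed through the rgb_string_to_hex helper defined above; a minimal sketch, using only names introduced in this hunk:
# Sketch: derive a matplotlib-ready map from enhanced_color_map via rgb_string_to_hex.
derived_matplotlib_map = {cat: rgb_string_to_hex(rgb) for cat, rgb in enhanced_color_map.items()}
assert derived_matplotlib_map['C5 Super Typhoon'] == '#ff0000'      # the helper emits lowercase hex
assert derived_matplotlib_map['Tropical Depression'] == '#808080'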
156
+
157
  # Original color map for backward compatibility
158
  color_map = {
159
  'C5 Super Typhoon': 'rgb(255, 0, 0)',
 
726
  return 'Neutral'
727
 
728
  # -----------------------------
729
+ # NEW: ADVANCED ML FEATURES WITH ROUTE VISUALIZATION
730
  # -----------------------------
731
 
732
  def extract_storm_features(typhoon_data):
733
+ """Extract comprehensive features for clustering analysis"""
734
  # Group by storm ID to get storm-level features
735
  storm_features = typhoon_data.groupby('SID').agg({
736
  'USA_WIND': ['max', 'mean', 'std'],
 
753
  genesis_data.columns = ['genesis_lat', 'genesis_lon', 'genesis_intensity']
754
  storm_features = storm_features.merge(genesis_data, on='SID', how='left')
755
 
756
+ # Add track shape features
757
+ track_stats = []
758
+ for sid in storm_features['SID']:
759
+ storm_track = typhoon_data[typhoon_data['SID'] == sid].sort_values('ISO_TIME')
760
+ if len(storm_track) > 2:
761
+ # Calculate track curvature and direction changes
762
+ lats = storm_track['LAT'].values
763
+ lons = storm_track['LON'].values
764
+
765
+ # Calculate bearing changes
766
+ bearing_changes = []
767
+ for i in range(1, len(lats)-1):
768
+ # Simple bearing calculation
769
+ dlat1 = lats[i] - lats[i-1]
770
+ dlon1 = lons[i] - lons[i-1]
771
+ dlat2 = lats[i+1] - lats[i]
772
+ dlon2 = lons[i+1] - lons[i]
773
+
774
+ angle1 = np.arctan2(dlat1, dlon1)
775
+ angle2 = np.arctan2(dlat2, dlon2)
776
+ change = abs(angle2 - angle1)
777
+ bearing_changes.append(change)
778
+
779
+ avg_curvature = np.mean(bearing_changes) if bearing_changes else 0
780
+ total_distance = np.sum(np.sqrt((np.diff(lats)**2 + np.diff(lons)**2)))
781
+
782
+ track_stats.append({
783
+ 'SID': sid,
784
+ 'avg_curvature': avg_curvature,
785
+ 'total_distance': total_distance
786
+ })
787
+ else:
788
+ track_stats.append({
789
+ 'SID': sid,
790
+ 'avg_curvature': 0,
791
+ 'total_distance': 0
792
+ })
793
+
794
+ track_stats_df = pd.DataFrame(track_stats)
795
+ storm_features = storm_features.merge(track_stats_df, on='SID', how='left')
796
+
797
  return storm_features
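Editor's note (not part of this commit): abs(angle2 - angle1) in the curvature feature above can exceed pi when a bearing crosses the ±pi boundary; a wrap-aware variant, shown only as a sketch:
import numpy as np

def wrapped_bearing_change(angle1, angle2):
    """Sketch: smallest absolute angular difference, wrapped into [0, pi]."""
    diff = (angle2 - angle1 + np.pi) % (2 * np.pi) - np.pi
    return abs(diff)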
798
 
799
  def perform_dimensionality_reduction(storm_features, method='umap', n_components=2):
 
833
 
834
  return embedding, feature_cols, scaler
835
 
836
+ def cluster_storms(embedding, method='dbscan', eps=0.5, min_samples=3):
837
  """Cluster storms based on their embedding"""
838
  if method.lower() == 'dbscan':
839
+ clusterer = DBSCAN(eps=eps, min_samples=min_samples)
840
  elif method.lower() == 'kmeans':
841
  clusterer = KMeans(n_clusters=5, random_state=42)
842
  else:
 
845
  clusters = clusterer.fit_predict(embedding)
846
  return clusters
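Editor's note (not part of this commit): a hedged end-to-end sketch of how the pieces above fit together, assuming typhoon_data is the track DataFrame loaded elsewhere in app.py:
features = extract_storm_features(typhoon_data)
embedding, feature_cols, scaler = perform_dimensionality_reduction(features, method='umap')
labels = cluster_storms(embedding, method='dbscan', eps=0.5, min_samples=3)
n_clusters = len(set(labels)) - (1 if -1 in labels else 0)   # DBSCAN labels noise as -1
print(f"{n_clusters} clusters across {len(features)} storms")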
847
 
848
+ def create_advanced_clustering_visualization(storm_features, typhoon_data, method='umap', show_routes=True):
849
+ """Create comprehensive clustering visualization with route display"""
850
  try:
851
  # Perform dimensionality reduction
852
  embedding, feature_cols, scaler = perform_dimensionality_reduction(storm_features, method)
 
864
  storm_info = typhoon_data.groupby('SID').first()[['NAME', 'SEASON']].reset_index()
865
  storm_features_viz = storm_features_viz.merge(storm_info, on='SID', how='left')
866
 
867
+ if show_routes:
868
+ # Create subplot with both scatter plot and route map
869
+ fig = make_subplots(
870
+ rows=1, cols=2,
871
+ subplot_titles=(
872
+ f'Storm Clustering using {method.upper()}',
873
+ 'Clustered Storm Routes'
874
+ ),
875
+ specs=[[{"type": "scatter"}, {"type": "geo"}]],
876
+ column_widths=[0.5, 0.5]
877
+ )
878
+
879
+ # Add clustering scatter plot
880
+ unique_clusters = sorted(storm_features_viz['cluster'].unique())
881
+ for i, cluster in enumerate(unique_clusters):
882
+ cluster_data = storm_features_viz[storm_features_viz['cluster'] == cluster]
883
+ color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)] if cluster != -1 else '#CCCCCC'
884
+ cluster_name = f'Cluster {cluster}' if cluster != -1 else 'Noise'
885
+
886
+ fig.add_trace(
887
+ go.Scatter(
888
+ x=cluster_data['dim1'],
889
+ y=cluster_data['dim2'],
890
+ mode='markers',
891
+ marker=dict(color=color, size=8),
892
+ name=cluster_name,
893
+ hovertemplate=(
894
+ '<b>%{customdata[0]}</b><br>'
895
+ 'Season: %{customdata[1]}<br>'
896
+ 'Max Wind: %{customdata[2]:.0f} kt<br>'
897
+ 'Min Pressure: %{customdata[3]:.0f} hPa<br>'
898
+ 'Track Length: %{customdata[4]:.0f} points<br>'
899
+ '<extra></extra>'
900
+ ),
901
+ customdata=np.column_stack((
902
+ cluster_data['NAME'],
903
+ cluster_data['SEASON'],
904
+ cluster_data['USA_WIND_max'],
905
+ cluster_data['USA_PRES_min'],
906
+ cluster_data['track_length']
907
+ ))
908
+ ),
909
+ row=1, col=1
910
+ )
911
+
912
+ # Add route map
913
+ for i, cluster in enumerate(unique_clusters):
914
+ if cluster == -1: # Skip noise for route visualization
915
+ continue
916
+
917
+ cluster_storms = storm_features_viz[storm_features_viz['cluster'] == cluster]['SID'].tolist()
918
+ color = CLUSTER_COLORS[i % len(CLUSTER_COLORS)]
919
+
920
+ for j, sid in enumerate(cluster_storms[:10]): # Limit to 10 storms per cluster for performance
921
+ storm_track = typhoon_data[typhoon_data['SID'] == sid].sort_values('ISO_TIME')
922
+ if len(storm_track) > 1:
923
+ storm_name = storm_track['NAME'].iloc[0] if pd.notna(storm_track['NAME'].iloc[0]) else 'UNNAMED'
924
+
925
+ fig.add_trace(
926
+ go.Scattergeo(
927
+ lon=storm_track['LON'],
928
+ lat=storm_track['LAT'],
929
+ mode='lines+markers',
930
+ line=dict(color=color, width=2),
931
+ marker=dict(color=color, size=4),
932
+ name=f'C{cluster}: {storm_name}' if j == 0 else None,
933
+ showlegend=(j == 0),
934
+ hovertemplate=(
935
+ f'<b>{storm_name}</b><br>'
936
+ 'Lat: %{lat:.1f}°<br>'
937
+ 'Lon: %{lon:.1f}°<br>'
938
+ f'Cluster: {cluster}<br>'
939
+ '<extra></extra>'
940
+ )
941
+ ),
942
+ row=1, col=2
943
+ )
944
+
945
+ # Update layout
946
+ fig.update_layout(
947
+ title_text="Advanced Storm Clustering Analysis with Route Visualization",
948
+ height=600,
949
+ showlegend=True
950
+ )
951
+
952
+ # Update geo layout
953
+ fig.update_geos(
954
+ projection_type="natural earth",
955
+ showland=True,
956
+ landcolor="LightGray",
957
+ showocean=True,
958
+ oceancolor="LightBlue",
959
+ showcoastlines=True,
960
+ coastlinecolor="Gray",
961
+ center=dict(lat=20, lon=140),
962
+ row=1, col=2
963
+ )
964
+
965
+ # Update scatter plot axes
966
+ fig.update_xaxes(title_text=f"{method.upper()} Dimension 1", row=1, col=1)
967
+ fig.update_yaxes(title_text=f"{method.upper()} Dimension 2", row=1, col=1)
968
+
969
+ else:
970
+ # Simple scatter plot only
971
+ fig = px.scatter(
972
+ storm_features_viz,
973
+ x='dim1',
974
+ y='dim2',
975
+ color='cluster',
976
+ hover_data=['NAME', 'SEASON', 'USA_WIND_max', 'USA_PRES_min'],
977
+ title=f'Storm Clustering using {method.upper()}',
978
+ labels={
979
+ 'dim1': f'{method.upper()} Dimension 1',
980
+ 'dim2': f'{method.upper()} Dimension 2',
981
+ 'cluster': 'Cluster'
982
+ }
983
+ )
984
 
985
+ # Generate detailed cluster statistics
986
  cluster_stats = storm_features_viz.groupby('cluster').agg({
987
+ 'USA_WIND_max': ['mean', 'std', 'min', 'max'],
988
+ 'USA_PRES_min': ['mean', 'std', 'min', 'max'],
989
+ 'track_length': ['mean', 'std'],
990
+ 'genesis_lat': 'mean',
991
+ 'genesis_lon': 'mean',
992
+ 'total_distance': 'mean',
993
+ 'avg_curvature': 'mean',
994
  'SID': 'count'
995
  }).round(2)
996
 
997
+ # Flatten column names for readability
998
+ cluster_stats.columns = ['_'.join(col).strip() for col in cluster_stats.columns]
999
+
1000
+ stats_text = "πŸŒ€ ADVANCED CLUSTER ANALYSIS RESULTS\n" + "="*50 + "\n\n"
1001
+
1002
+ for cluster in sorted(storm_features_viz['cluster'].unique()):
1003
+ if cluster == -1:
1004
+ stats_text += f"πŸ”Έ NOISE POINTS: {cluster_stats.loc[-1, 'SID_count']} storms\n\n"
1005
+ continue
1006
+
1007
+ cluster_row = cluster_stats.loc[cluster]
1008
+ storm_count = int(cluster_row['SID_count'])
1009
+
1010
+ stats_text += f"🌪️ CLUSTER {cluster}: {storm_count} storms\n"
1011
+ stats_text += f" Intensity: {cluster_row['USA_WIND_max_mean']:.1f} ± {cluster_row['USA_WIND_max_std']:.1f} kt\n"
1012
+ stats_text += f" Pressure: {cluster_row['USA_PRES_min_mean']:.1f} ± {cluster_row['USA_PRES_min_std']:.1f} hPa\n"
1013
+ stats_text += f" Track Length: {cluster_row['track_length_mean']:.1f} ± {cluster_row['track_length_std']:.1f} points\n"
1014
+ stats_text += f" Genesis Region: {cluster_row['genesis_lat']:.1f}°N, {cluster_row['genesis_lon']:.1f}°E\n"
1015
+ stats_text += f" Avg Distance: {cluster_row['total_distance_mean']:.2f} degrees\n"
1016
+ stats_text += f" Avg Curvature: {cluster_row['avg_curvature_mean']:.3f} radians\n\n"
1017
+
1018
+ # Add feature importance summary
1019
+ stats_text += "📊 CLUSTERING FEATURES USED:\n"
1020
+ stats_text += f" • Storm intensity (max/mean/std wind & pressure)\n"
1021
+ stats_text += f" • Track characteristics (length, curvature, distance)\n"
1022
+ stats_text += f" • Genesis location (lat/lon)\n"
1023
+ stats_text += f" • Geographic range (lat/lon span)\n"
1024
+ stats_text += f" • Total features: {len(feature_cols)}\n\n"
1025
+
1026
+ stats_text += f"🎯 ALGORITHM: {method.upper()} + DBSCAN clustering\n"
1027
+ stats_text += f"📈 CLUSTERS FOUND: {len([c for c in storm_features_viz['cluster'].unique() if c != -1])}\n"
1028
 
1029
  return fig, stats_text, storm_features_viz
1030
+
1031
  except Exception as e:
1032
+ error_fig = go.Figure()
1033
+ error_fig.add_annotation(
1034
+ text=f"Error in clustering analysis: {str(e)}",
1035
+ xref="paper", yref="paper",
1036
+ x=0.5, y=0.5, xanchor='center', yanchor='middle',
1037
+ showarrow=False, font_size=16
1038
+ )
1039
+ return error_fig, f"Error in clustering: {str(e)}", None
1040
 
1041
  # -----------------------------
1042
  # NEW: Optional CNN Implementation
 
1355
  return fig, slopes_text, regression
1356
 
1357
  def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
1358
+ """Categorize typhoon by standard with enhanced TD support - FIXED for matplotlib"""
1359
  if pd.isna(wind_speed):
1360
  return 'Tropical Depression', '#808080'
1361
 
1362
  if standard=='taiwan':
1363
  wind_speed_ms = wind_speed * 0.514444
1364
  if wind_speed_ms >= 51.0:
1365
+ return 'Strong Typhoon', '#FF0000' # Red
1366
  elif wind_speed_ms >= 33.7:
1367
+ return 'Medium Typhoon', '#FFA500' # Orange
1368
  elif wind_speed_ms >= 17.2:
1369
+ return 'Mild Typhoon', '#FFFF00' # Yellow
1370
+ return 'Tropical Depression', '#808080' # Gray
1371
  else:
1372
  if wind_speed >= 137:
1373
+ return 'C5 Super Typhoon', '#FF0000' # Red
1374
  elif wind_speed >= 113:
1375
+ return 'C4 Very Strong Typhoon', '#FFA500' # Orange
1376
  elif wind_speed >= 96:
1377
+ return 'C3 Strong Typhoon', '#FFFF00' # Yellow
1378
  elif wind_speed >= 83:
1379
+ return 'C2 Typhoon', '#00FF00' # Green
1380
  elif wind_speed >= 64:
1381
+ return 'C1 Typhoon', '#00FFFF' # Cyan
1382
  elif wind_speed >= 34:
1383
+ return 'Tropical Storm', '#0000FF' # Blue
1384
+ return 'Tropical Depression', '#808080' # Gray
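Editor's note (not part of this commit): spot checks implied by the thresholds above:
assert categorize_typhoon_by_standard(150) == ('C5 Super Typhoon', '#FF0000')
assert categorize_typhoon_by_standard(40) == ('Tropical Storm', '#0000FF')
assert categorize_typhoon_by_standard(70, standard='taiwan') == ('Medium Typhoon', '#FFA500')   # 70 kt is about 36 m/s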
1385
 
1386
  # -----------------------------
1387
  # ENHANCED: Animation Functions
 
1455
  return gr.update(choices=["Error loading storms"], value=None)
1456
 
1457
  def generate_enhanced_track_video(year, typhoon_selection, standard):
1458
+ """Enhanced track video generation with TD support and 2025 compatibility - FIXED color handling"""
1459
  if not typhoon_selection or typhoon_selection == "No storms found":
1460
  return None
1461
 
 
1466
  # Get storm data
1467
  storm_df = typhoon_data[typhoon_data['SID'] == sid].copy()
1468
  if storm_df.empty:
1469
+ print(f"No data found for storm {sid}")
1470
  return None
1471
 
1472
  # Sort by time
 
1486
  storm_name = storm_df['NAME'].iloc[0] if pd.notna(storm_df['NAME'].iloc[0]) else "UNNAMED"
1487
  season = storm_df['SEASON'].iloc[0] if 'SEASON' in storm_df.columns else year
1488
 
1489
+ print(f"Generating video for {storm_name} ({sid}) with {len(lats)} track points")
1490
+
1491
  # Create figure with enhanced map
1492
  fig, ax = plt.subplots(figsize=(14, 8), subplot_kw={'projection': ccrs.PlateCarree()})
1493
 
 
1521
  fontsize=11, verticalalignment='top',
1522
  bbox=dict(boxstyle="round,pad=0.5", facecolor='white', alpha=0.9))
1523
 
1524
+ # Color legend with TD support - FIXED
1525
  legend_elements = []
1526
+ for category in ['Tropical Depression', 'Tropical Storm', 'C1 Typhoon', 'C2 Typhoon',
1527
+ 'C3 Strong Typhoon', 'C4 Very Strong Typhoon', 'C5 Super Typhoon']:
1528
+ if category in matplotlib_color_map:
1529
+ color = get_matplotlib_color(category)
1530
+ legend_elements.append(plt.Line2D([0], [0], marker='o', color='w',
1531
+ markerfacecolor=color, markersize=8, label=category))
1532
 
1533
  ax.legend(handles=legend_elements, loc='upper right', fontsize=9)
1534
 
1535
  def animate(frame):
1536
+ try:
1537
+ if frame >= len(lats):
1538
+ return line, point, info_box
1539
+
1540
+ # Update track line
1541
+ line.set_data(lons[:frame+1], lats[:frame+1])
1542
+
1543
+ # Update current position
1544
+ current_wind = winds[frame]
1545
+ category = categorize_typhoon_enhanced(current_wind)
1546
+ color = get_matplotlib_color(category) # FIXED: Use matplotlib-compatible color
1547
+
1548
+ # Debug print for first few frames
1549
+ if frame < 3:
1550
+ print(f"Frame {frame}: Wind={current_wind:.1f}kt, Category={category}, Color={color}")
1551
+
1552
+ point.set_data([lons[frame]], [lats[frame]])
1553
+ point.set_color(color)
1554
+ point.set_markersize(8 + current_wind/10) # Size based on intensity
1555
+
1556
+ # Enhanced info display
1557
+ if 'ISO_TIME' in storm_df.columns and frame < len(storm_df):
1558
+ current_time = storm_df.iloc[frame]['ISO_TIME']
1559
+ time_str = current_time.strftime('%Y-%m-%d %H:%M UTC') if pd.notna(current_time) else 'Unknown'
1560
+ else:
1561
+ time_str = f"Step {frame+1}"
1562
+
1563
+ info_text = (
1564
+ f"Storm: {storm_name}\n"
1565
+ f"Time: {time_str}\n"
1566
+ f"Position: {lats[frame]:.1f}°N, {lons[frame]:.1f}°E\n"
1567
+ f"Max Wind: {current_wind:.0f} kt\n"
1568
+ f"Category: {category}\n"
1569
+ f"Frame: {frame+1}/{len(lats)}"
1570
+ )
1571
+ info_box.set_text(info_text)
1572
+
1573
+ return line, point, info_box
1574
+
1575
+ except Exception as e:
1576
+ print(f"Error in animate frame {frame}: {e}")
1577
  return line, point, info_box
1578
 
1579
  # Create animation
1580
  anim = animation.FuncAnimation(
 
1592
  extra_args=['-pix_fmt', 'yuv420p'] # Better compatibility
1593
  )
1594
 
1595
+ print(f"Saving animation to {temp_file.name}")
1596
  anim.save(temp_file.name, writer=writer, dpi=100)
1597
  plt.close(fig)
1598
 
1599
+ print(f"βœ… Video generated successfully: {temp_file.name}")
1600
  return temp_file.name
1601
 
1602
  except Exception as e:
1603
+ print(f"❌ Error generating video: {e}")
1604
+ import traceback
1605
+ traceback.print_exc()
1606
  return None
1607
 
1608
+ # Simplified wrapper for backward compatibility - FIXED
1609
  def simplified_track_video(year, basin, typhoon, standard):
1610
+ """Simplified track video function with fixed color handling"""
1611
  if not typhoon:
1612
  return None
1613
  return generate_enhanced_track_video(year, typhoon, standard)
 
1676
  This dashboard provides comprehensive analysis of typhoon data in relation to ENSO phases with advanced machine learning capabilities.
1677
 
1678
  ### πŸ†• Enhanced Features:
1679
+ - **πŸ“ˆ Advanced ML Clustering**: UMAP/t-SNE storm pattern analysis with route visualization
1680
  - **πŸ€– Optional CNN Predictions**: Deep learning intensity forecasting
1681
  - **πŸŒ€ Complete TD Support**: Now includes Tropical Depressions (< 34 kt)
1682
  - **πŸ“… 2025 Data Ready**: Real-time compatibility with current year data
 
1695
  - **Platform Compatibility**: βœ… Optimized for Hugging Face Spaces
1696
  """)
1697
 
1698
+ with gr.Tab("πŸ” Advanced ML Clustering with Routes"):
1699
+ gr.Markdown("## Storm Pattern Analysis using UMAP/t-SNE with Route Visualization")
1700
+ gr.Markdown("**This tab shows both the dimensional clustering analysis AND the actual storm tracks colored by cluster**")
1701
 
1702
  with gr.Row():
1703
  reduction_method = gr.Dropdown(
 
1705
  value='UMAP' if UMAP_AVAILABLE else 't-SNE',
1706
  label="Dimensionality Reduction Method"
1707
  )
1708
+ show_routes = gr.Checkbox(
1709
+ label="Show Storm Routes on Map",
1710
+ value=True,
1711
+ info="Display actual storm tracks colored by cluster"
1712
  )
1713
 
1714
+ analyze_clusters_btn = gr.Button("🎯 Analyze Storm Clusters & Routes", variant="primary")
1715
 
1716
  with gr.Row():
1717
+ cluster_plot = gr.Plot(label="Storm Clustering with Route Visualization", height=600)
 
1718
 
1719
+ with gr.Row():
1720
+ cluster_stats = gr.Textbox(label="Detailed Cluster Statistics", lines=15, max_lines=20)
1721
+
1722
+ def run_advanced_clustering_analysis(method, show_routes):
1723
  try:
1724
  # Extract features for clustering
1725
  storm_features = extract_storm_features(typhoon_data)
1726
+ fig, stats, _ = create_advanced_clustering_visualization(storm_features, typhoon_data, method.lower(), show_routes)
1727
  return fig, stats
1728
  except Exception as e:
1729
+ import traceback
1730
+ error_details = traceback.format_exc()
1731
+ return None, f"Error: {str(e)}\n\nDetails:\n{error_details}"
1732
 
1733
  analyze_clusters_btn.click(
1734
+ fn=run_advanced_clustering_analysis,
1735
+ inputs=[reduction_method, show_routes],
1736
  outputs=[cluster_plot, cluster_stats]
1737
  )
1738
 
1739
  gr.Markdown("""
1740
+ ### 🧠 Advanced Clustering Features:
1741
+ - **🎯 Multi-dimensional Analysis**: Uses 15+ storm characteristics including intensity, track shape, genesis location
1742
+ - **πŸ—ΊοΈ Route Visualization**: Shows actual storm tracks colored by cluster membership
1743
+ - **πŸ“Š DBSCAN Clustering**: Automatically finds natural groupings without predefined cluster count
1744
+ - **πŸ“ˆ Comprehensive Stats**: Detailed cluster analysis including intensity, pressure, track length, curvature
1745
+ - **πŸ”„ Interactive**: Hover over points to see storm details, zoom and pan the route map
1746
+
1747
+ ### πŸ“– How to Interpret:
1748
+ - **Left Plot**: Each dot is a storm positioned by similarity (close = similar characteristics)
1749
+ - **Right Plot**: Actual geographic storm tracks, colored by which cluster they belong to
1750
+ - **Cluster Colors**: Each cluster gets a unique color to identify similar storm patterns
1751
+ - **Noise Points**: Gray points represent storms that don't fit clear patterns
1752
  """)
1753
 
1754
  with gr.Tab("πŸ€– Intensity Prediction"):
 
1917
  - **⚑ Optimized Export**: Fast rendering with web-compatible video formats
1918
  """)
1919
 
1920
+ with gr.Tab("πŸ“Š Data Statistics & Insights"):
1921
+ gr.Markdown("## Comprehensive Dataset Analysis")
1922
+
1923
+ # Create enhanced data summary
1924
+ try:
1925
+ if len(typhoon_data) > 0:
1926
+ # Storm category distribution
1927
+ storm_cats = typhoon_data.groupby('SID')['USA_WIND'].max().apply(categorize_typhoon_enhanced)
1928
+ cat_counts = storm_cats.value_counts()
1929
+
1930
+ # Create distribution chart with enhanced colors
1931
+ fig_dist = px.bar(
1932
+ x=cat_counts.index,
1933
+ y=cat_counts.values,
1934
+ title="Storm Intensity Distribution (Including Tropical Depressions)",
1935
+ labels={'x': 'Category', 'y': 'Number of Storms'},
1936
+ color=cat_counts.index,
1937
+ color_discrete_map=enhanced_color_map
1938
+ )
1939
+
1940
+ # Seasonal distribution
1941
+ if 'ISO_TIME' in typhoon_data.columns:
1942
+ seasonal_data = typhoon_data.copy()
1943
+ seasonal_data['Month'] = seasonal_data['ISO_TIME'].dt.month
1944
+ monthly_counts = seasonal_data.groupby(['Month', 'SID']).size().groupby('Month').size()
1945
+
1946
+ fig_seasonal = px.bar(
1947
+ x=monthly_counts.index,
1948
+ y=monthly_counts.values,
1949
+ title="Seasonal Storm Distribution",
1950
+ labels={'x': 'Month', 'y': 'Number of Storms'},
1951
+ color=monthly_counts.values,
1952
+ color_continuous_scale='Viridis'
1953
+ )
1954
+ else:
1955
+ fig_seasonal = None
1956
+
1957
+ # Basin distribution
1958
+ if 'SID' in typhoon_data.columns:
1959
+ basin_data = typhoon_data['SID'].str[:2].value_counts()
1960
+ fig_basin = px.pie(
1961
+ values=basin_data.values,
1962
+ names=basin_data.index,
1963
+ title="Distribution by Basin"
1964
+ )
1965
+ else:
1966
+ fig_basin = None
1967
+
1968
+ with gr.Row():
1969
+ gr.Plot(value=fig_dist)
1970
+
1971
+ if fig_seasonal:
1972
+ with gr.Row():
1973
+ gr.Plot(value=fig_seasonal)
1974
+
1975
+ if fig_basin:
1976
+ with gr.Row():
1977
+ gr.Plot(value=fig_basin)
1978
+
1979
+ except Exception as e:
1980
+ gr.Markdown(f"Visualization error: {str(e)}")
1981
+
1982
+ # Enhanced statistics
1983
+ total_storms = len(typhoon_data['SID'].unique()) if 'SID' in typhoon_data.columns else 0
1984
+ total_records = len(typhoon_data)
1985
+
1986
+ if 'SEASON' in typhoon_data.columns:
1987
+ year_range = f"{typhoon_data['SEASON'].min():.0f}-{typhoon_data['SEASON'].max():.0f}"
1988
+ years_covered = typhoon_data['SEASON'].nunique()
1989
+ else:
1990
+ year_range = "Unknown"
1991
+ years_covered = 0
1992
+
1993
+ if 'SID' in typhoon_data.columns:
1994
+ basins_available = ', '.join(sorted(typhoon_data['SID'].str[:2].unique()))
1995
+ avg_storms_per_year = total_storms / max(years_covered, 1)
1996
+ else:
1997
+ basins_available = "Unknown"
1998
+ avg_storms_per_year = 0
1999
+
2000
+ # TD specific statistics
2001
+ if 'USA_WIND' in typhoon_data.columns:
2002
+ td_storms = len(typhoon_data[typhoon_data['USA_WIND'] < 34]['SID'].unique())
2003
+ ts_storms = len(typhoon_data[(typhoon_data['USA_WIND'] >= 34) & (typhoon_data['USA_WIND'] < 64)]['SID'].unique())
2004
+ typhoon_storms = len(typhoon_data[typhoon_data['USA_WIND'] >= 64]['SID'].unique())
2005
+ td_percentage = (td_storms / max(total_storms, 1)) * 100
2006
+ else:
2007
+ td_storms = ts_storms = typhoon_storms = 0
2008
+ td_percentage = 0
2009
+
2010
+ gr.Markdown(f"""
2011
+ ### πŸ“ˆ Enhanced Dataset Summary:
2012
+ - **Total Unique Storms**: {total_storms:,}
2013
+ - **Total Track Records**: {total_records:,}
2014
+ - **Year Range**: {year_range} ({years_covered} years)
2015
+ - **Basins Available**: {basins_available}
2016
+ - **Average Storms/Year**: {avg_storms_per_year:.1f}
2017
+
2018
+ ### πŸŒ€ Storm Category Breakdown:
2019
+ - **Tropical Depressions**: {td_storms:,} storms ({td_percentage:.1f}%)
2020
+ - **Tropical Storms**: {ts_storms:,} storms
2021
+ - **Typhoons (C1-C5)**: {typhoon_storms:,} storms
2022
+
2023
+ ### πŸ†• New Platform Capabilities:
2024
+ - βœ… **Complete TD Analysis** - First platform to include comprehensive TD tracking
2025
+ - βœ… **Advanced ML Clustering** - DBSCAN pattern recognition with route visualization
2026
+ - βœ… **Real-time Predictions** - Physics-based and optional CNN intensity forecasting
2027
+ - βœ… **2025 Data Ready** - Full compatibility with current season data
2028
+ - βœ… **Enhanced Animations** - Professional-quality storm track videos
2029
+ - βœ… **Multi-basin Analysis** - Comprehensive Pacific and Atlantic coverage
2030
+
2031
+ ### πŸ”¬ Research Applications:
2032
+ - Climate change impact studies
2033
+ - Seasonal forecasting research
2034
+ - Storm pattern classification
2035
+ - ENSO-typhoon relationship analysis
2036
+ - Intensity prediction model development
2037
+ """)
2038
+
2039
  return demo
2040
  except Exception as e:
2041
  logging.error(f"Error creating Gradio interface: {e}")
 
2043
  with gr.Blocks() as demo:
2044
  gr.Markdown("# πŸŒͺ️ Enhanced Typhoon Analysis Platform")
2045
  gr.Markdown("**Error**: Could not load full interface. Please check logs.")
2046
+ gr.Markdown(f"Error details: {str(e)}")
2047
  return demo
2048
 
2049
+ # -----------------------------
2050
+ # Color Test Functions (Optional)
2051
+ # -----------------------------
2052
+
2053
+ def test_color_conversion():
2054
+ """Test color conversion functions"""
2055
+ print("🎨 Testing color conversion...")
2056
+
2057
+ # Test all categories
2058
+ test_winds = [25, 40, 70, 85, 100, 120, 150] # TD, TS, C1, C2, C3, C4, C5
2059
+
2060
+ for wind in test_winds:
2061
+ category = categorize_typhoon_enhanced(wind)
2062
+ plotly_color = enhanced_color_map.get(category, 'rgb(128,128,128)')
2063
+ matplotlib_color = get_matplotlib_color(category)
2064
+
2065
+ print(f"Wind: {wind:3d}kt → {category:20s} → Plotly: {plotly_color:15s} → Matplotlib: {matplotlib_color}")
2066
+
2067
+ print("βœ… Color conversion test complete!")
2068
+
2069
+ def test_rgb_conversion():
2070
+ """Test RGB string to hex conversion"""
2071
+ test_colors = [
2072
+ 'rgb(128, 128, 128)',
2073
+ 'rgb(255, 0, 0)',
2074
+ 'rgb(0, 255, 0)',
2075
+ 'rgb(0, 0, 255)'
2076
+ ]
2077
+
2078
+ print("πŸ”§ Testing RGB to hex conversion...")
2079
+ for rgb_str in test_colors:
2080
+ hex_color = rgb_string_to_hex(rgb_str)
2081
+ print(f"{rgb_str:20s} → {hex_color}")
2082
+
2083
+ print("βœ… RGB conversion test complete!")
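Editor's note (not part of this commit): these optional checks are not wired into the UI; they could be run manually, for example from a Python shell after importing app.py:
# >>> test_color_conversion()
# >>> test_rgb_conversion()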
2084
+
2085
  # Create and launch the interface
2086
  demo = create_interface()
2087