euler314 committed on
Commit a2d2271 · verified · 1 Parent(s): 4137f10

Update app.py

Files changed (1):
  1. app.py +393 -1270

app.py CHANGED
@@ -1,448 +1,197 @@
  import gradio as gr
  import plotly.graph_objects as go
  import plotly.express as px
- import pickle
- import tropycal.tracks as tracks
  import pandas as pd
  import numpy as np
- import cachetools
- import functools
- import hashlib
- import os
- import argparse
- from datetime import datetime, timedelta
- from datetime import date, datetime
  from scipy import stats
- from scipy.optimize import minimize, curve_fit
  from sklearn.linear_model import LinearRegression
  from sklearn.cluster import KMeans
  from scipy.interpolate import interp1d
  from fractions import Fraction
- from concurrent.futures import ThreadPoolExecutor
- from sklearn.metrics import mean_squared_error
  import statsmodels.api as sm
- import schedule
- import time
- import threading
  import requests
- from io import StringIO
  import tempfile
- import csv
- from collections import defaultdict
  import shutil
  import filecmp

- # Add command-line argument parsing
  parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
  parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
  args = parser.parse_args()
-
- # Use the command-line argument for data path
  DATA_PATH = args.data_path

  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
- LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
  iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.WP.list.v04r01.csv'
-
  CACHE_FILE = 'ibtracs_cache.pkl'
  CACHE_EXPIRY_DAYS = 1
- last_oni_update = None

- def should_update_oni():
-     today = datetime.now()
-     # Beginning of the month: 1st day
-     if today.day == 1:
-         return True
-     # Middle of the month: 15th day
-     if today.day == 15:
-         return True
-     # End of the month: last day
-     if today.day == (today.replace(day=1, month=today.month%12+1) - timedelta(days=1)).day:
-         return True
-     return False

- color_map = {
-     'C5 Super Typhoon': 'rgb(255, 0, 0)',        # Red
-     'C4 Very Strong Typhoon': 'rgb(255, 63, 0)', # Red-Orange
-     'C3 Strong Typhoon': 'rgb(255, 127, 0)',     # Orange
-     'C2 Typhoon': 'rgb(255, 191, 0)',            # Orange-Yellow
-     'C1 Typhoon': 'rgb(255, 255, 0)',            # Yellow
-     'Tropical Storm': 'rgb(0, 255, 255)',        # Cyan
-     'Tropical Depression': 'rgb(173, 216, 230)'  # Light Blue
  }

  def convert_typhoondata(input_file, output_file):
      with open(input_file, 'r') as infile:
-         # Skip the title and the unit line.
          next(infile)
          next(infile)
-
          reader = csv.reader(infile)
-
-         # Used for storing data for each SID
          sid_data = defaultdict(list)
-
          for row in reader:
-             if not row:  # Skip the blank lines
                  continue
-
              sid = row[0]
              iso_time = row[6]
              sid_data[sid].append((row, iso_time))
-
      with open(output_file, 'w', newline='') as outfile:
          fieldnames = ['SID', 'ISO_TIME', 'LAT', 'LON', 'SEASON', 'NAME', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES', 'START_DATE', 'END_DATE']
          writer = csv.DictWriter(outfile, fieldnames=fieldnames)
-
          writer.writeheader()
-
          for sid, data in sid_data.items():
              start_date = min(data, key=lambda x: x[1])[1]
              end_date = max(data, key=lambda x: x[1])[1]
-
              for row, iso_time in data:
                  writer.writerow({
-                     'SID': row[0],
-                     'ISO_TIME': iso_time,
-                     'LAT': row[8],
-                     'LON': row[9],
-                     'SEASON': row[1],
-                     'NAME': row[5],
-                     'WMO_WIND': row[10].strip() or ' ',
-                     'WMO_PRES': row[11].strip() or ' ',
-                     'USA_WIND': row[23].strip() or ' ',
-                     'USA_PRES': row[24].strip() or ' ',
-                     'START_DATE': start_date,
-                     'END_DATE': end_date
                  })

-
  def download_oni_file(url, filename):
-     print(f"Downloading file from {url}...")
      try:
          response = requests.get(url)
-         response.raise_for_status()  # Raises an exception for non-200 status codes
          with open(filename, 'wb') as f:
              f.write(response.content)
-         print(f"File successfully downloaded and saved as {filename}")
          return True
-     except requests.RequestException as e:
-         print(f"Download failed. Error: {e}")
          return False

-
  def convert_oni_ascii_to_csv(input_file, output_file):
      data = defaultdict(lambda: [''] * 12)
-     season_to_month = {
-         'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5,
-         'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11
-     }
-
-     print(f"Attempting to read file: {input_file}")
-     try:
-         with open(input_file, 'r') as f:
-             lines = f.readlines()
-             print(f"Successfully read {len(lines)} lines")
-
-             if len(lines) <= 1:
-                 print("Error: File is empty or contains only header")
-                 return
-
-             for line in lines[1:]:  # Skip header
-                 parts = line.split()
-                 if len(parts) >= 4:
-                     season, year = parts[0], parts[1]
-                     anom = parts[-1]
-
-                     if season in season_to_month:
-                         month = season_to_month[season]
-
-                         if season == 'DJF':
-                             year = str(int(year) - 1)
-
-                         data[year][month-1] = anom
-                     else:
-                         print(f"Warning: Unknown season: {season}")
-                 else:
-                     print(f"Warning: Skipping invalid line: {line.strip()}")
-
-         print(f"Processed data for {len(data)} years")
-     except Exception as e:
-         print(f"Error reading file: {e}")
-         return
-
-     print(f"Attempting to write file: {output_file}")
-     try:
-         with open(output_file, 'w', newline='') as f:
-             writer = csv.writer(f)
-             writer.writerow(['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
-
-             for year in sorted(data.keys()):
-                 row = [year] + data[year]
-                 writer.writerow(row)
-
-         print(f"Successfully wrote {len(data)} rows of data")
-     except Exception as e:
-         print(f"Error writing file: {e}")
-         return
-
-     print(f"Conversion complete. Data saved to {output_file}")

  def update_oni_data():
-     global last_oni_update
-     current_date = date.today()
-
-     # Check if already updated today
-     if last_oni_update == current_date:
-         print("ONI data already checked today. Skipping update.")
-         return
-
      url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
      temp_file = os.path.join(DATA_PATH, "temp_oni.ascii.txt")
      input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
      output_file = ONI_DATA_PATH
-
      if download_oni_file(url, temp_file):
          if not os.path.exists(input_file) or not filecmp.cmp(temp_file, input_file, shallow=False):
-             # File doesn't exist or has been updated
              os.replace(temp_file, input_file)
-             print("New ONI data detected. Converting to CSV.")
              convert_oni_ascii_to_csv(input_file, output_file)
-             print("ONI data updated successfully.")
          else:
-             print("ONI data is up to date. No conversion needed.")
-             os.remove(temp_file)  # Remove temporary file
-
-         last_oni_update = current_date
-     else:
-         print("Failed to download ONI data.")
-         if os.path.exists(temp_file):
-             os.remove(temp_file)  # Ensure cleanup of temporary file

  def load_ibtracs_data():
-     if os.path.exists(CACHE_FILE):
-         cache_time = datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))
-         if datetime.now() - cache_time < timedelta(days=CACHE_EXPIRY_DAYS):
-             print("Loading data from cache...")
-             with open(CACHE_FILE, 'rb') as f:
-                 return pickle.load(f)
-
      if os.path.exists(LOCAL_iBtrace_PATH):
-         print("Using local IBTrACS file...")
          ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
      else:
-         print("Local IBTrACS file not found. Fetching data from remote server...")
-         try:
-             response = requests.get(iBtrace_uri)
-             response.raise_for_status()
-
-             with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
-                 temp_file.write(response.text)
-                 temp_file_path = temp_file.name
-
-             # Save the downloaded data as the local file
-             shutil.move(temp_file_path, LOCAL_iBtrace_PATH)
-             print(f"Downloaded data saved to {LOCAL_iBtrace_PATH}")
-
-             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
-         except requests.RequestException as e:
-             print(f"Error downloading data: {e}")
-             print("No local file available and download failed. Unable to load IBTrACS data.")
-             return None
-
-     with open(CACHE_FILE, 'wb') as f:
-         pickle.dump(ibtracs, f)
-
-     return ibtracs
-
- def update_ibtracs_data():
-     global ibtracs
-     print("Checking for IBTrACS data updates...")
-
-     try:
-         # Get the last-modified time of the remote file
-         response = requests.head(iBtrace_uri)
-         remote_last_modified = datetime.strptime(response.headers['Last-Modified'], '%a, %d %b %Y %H:%M:%S GMT')
-
-         # Get the last-modified time of the local file
-         if os.path.exists(LOCAL_iBtrace_PATH):
-             local_last_modified = datetime.fromtimestamp(os.path.getmtime(LOCAL_iBtrace_PATH))
-         else:
-             local_last_modified = datetime.min
-
-         # Compare the modification times
-         if remote_last_modified <= local_last_modified:
-             print("Local IBTrACS data is up to date. No update needed.")
-             if os.path.exists(CACHE_FILE):
-                 # Update the cache file's timestamp to extend its validity
-                 os.utime(CACHE_FILE, None)
-                 print("Cache file timestamp updated.")
-             return
-
-         print("Remote data is newer. Updating IBTrACS data...")
-
-         # Download the new data
          response = requests.get(iBtrace_uri)
          response.raise_for_status()
-
          with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
              temp_file.write(response.text)
              temp_file_path = temp_file.name
-
-         # Save the downloaded data as the local file
          shutil.move(temp_file_path, LOCAL_iBtrace_PATH)
-         print(f"Downloaded data saved to {LOCAL_iBtrace_PATH}")
-
-         # Update the last modified time of the local file to match the remote file
-         os.utime(LOCAL_iBtrace_PATH, (remote_last_modified.timestamp(), remote_last_modified.timestamp()))
-
          ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
-
-         with open(CACHE_FILE, 'wb') as f:
-             pickle.dump(ibtracs, f)
-         print("IBTrACS data updated and cache refreshed.")
-
-     except requests.RequestException as e:
-         print(f"Error checking or downloading data: {e}")
-         if os.path.exists(LOCAL_iBtrace_PATH):
-             print("Using existing local file.")
-             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
-             if os.path.exists(CACHE_FILE):
-                 # Update the cache file's timestamp even when using existing local file
-                 os.utime(CACHE_FILE, None)
-                 print("Cache file timestamp updated.")
-         else:
-             print("No local file available. Update failed.")
-
- def run_schedule():
-     while True:
-         schedule.run_pending()
-         time.sleep(1)
-
- def analyze_typhoon_generation(merged_data, start_date, end_date):
-     filtered_data = merged_data[
-         (merged_data['ISO_TIME'] >= start_date) &
-         (merged_data['ISO_TIME'] <= end_date)
-     ]
-
-     filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
-
-     typhoon_counts = filtered_data['ENSO_Phase'].value_counts().to_dict()
-
-     month_counts = filtered_data.groupby(['ENSO_Phase', filtered_data['ISO_TIME'].dt.month]).size().unstack(fill_value=0)
-     concentrated_months = month_counts.idxmax(axis=1).to_dict()
-
-     return typhoon_counts, concentrated_months
-
- def cache_key_generator(*args, **kwargs):
-     key = hashlib.md5()
-     for arg in args:
-         key.update(str(arg).encode())
-     for k, v in sorted(kwargs.items()):
-         key.update(str(k).encode())
-         key.update(str(v).encode())
-     return key.hexdigest()
-
- def categorize_typhoon(wind_speed):
-     wind_speed_kt = wind_speed / 2  # Convert kt to m/s
-
-     # Add category classification
-     if wind_speed_kt >= 137/2.35:
-         return 'C5 Super Typhoon'
-     elif wind_speed_kt >= 113/2.35:
-         return 'C4 Very Strong Typhoon'
-     elif wind_speed_kt >= 96/2.35:
-         return 'C3 Strong Typhoon'
-     elif wind_speed_kt >= 83/2.35:
-         return 'C2 Typhoon'
-     elif wind_speed_kt >= 64/2.35:
-         return 'C1 Typhoon'
-     elif wind_speed_kt >= 34/2.35:
-         return 'Tropical Storm'
-     else:
-         return 'Tropical Depression'
-
- @functools.lru_cache(maxsize=None)
- def process_oni_data_cached(oni_data_hash):
-     return process_oni_data(oni_data)

  def process_oni_data(oni_data):
      oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
-     oni_long['Month'] = oni_long['Month'].map({
-         'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06',
-         'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'
-     })
      oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str) + '-' + oni_long['Month'] + '-01')
      oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
      return oni_long

- def process_oni_data_with_cache(oni_data):
-     oni_data_hash = cache_key_generator(oni_data.to_json())
-     return process_oni_data_cached(oni_data_hash)
-
- @functools.lru_cache(maxsize=None)
- def process_typhoon_data_cached(typhoon_data_hash):
-     return process_typhoon_data(typhoon_data)
-
  def process_typhoon_data(typhoon_data):
      typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
      typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
      typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
      typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
-
      typhoon_max = typhoon_data.groupby('SID').agg({
-         'USA_WIND': 'max',
-         'USA_PRES': 'min',
-         'ISO_TIME': 'first',
-         'SEASON': 'first',
-         'NAME': 'first',
-         'LAT': 'first',
-         'LON': 'first'
      }).reset_index()
-
      typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
      typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
      typhoon_max['Category'] = typhoon_max['USA_WIND'].apply(categorize_typhoon)
      return typhoon_max

- def process_typhoon_data_with_cache(typhoon_data):
-     typhoon_data_hash = cache_key_generator(typhoon_data.to_json())
-     return process_typhoon_data_cached(typhoon_data_hash)
-
  def merge_data(oni_long, typhoon_max):
      return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])

- def calculate_logistic_regression(merged_data):
-     data = merged_data.dropna(subset=['USA_WIND', 'ONI'])
-
-     # Create binary outcome for severe typhoons
-     data['severe_typhoon'] = (data['USA_WIND'] >= 51).astype(int)
-
-     # Create binary predictor for El Niño
-     data['el_nino'] = (data['ONI'] >= 0.5).astype(int)
-
-     X = data['el_nino']
-     X = sm.add_constant(X)  # Add constant term
-     y = data['severe_typhoon']
-
-     model = sm.Logit(y, X).fit()
-
-     beta_1 = model.params['el_nino']
-     exp_beta_1 = np.exp(beta_1)
-     p_value = model.pvalues['el_nino']
-
-     return beta_1, exp_beta_1, p_value
-
- @cachetools.cached(cache={})
- def fetch_oni_data_from_csv(file_path):
-     df = pd.read_csv(file_path, sep=',', header=0, na_values='-99.90')
-     df.columns = ['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
-     df = df.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
-     df['Date'] = pd.to_datetime(df['Year'].astype(str) + df['Month'], format='%Y%b')
-     df = df.set_index('Date')
-     return df

  def classify_enso_phases(oni_value):
      if isinstance(oni_value, pd.Series):
@@ -454,118 +203,243 @@ def classify_enso_phases(oni_value):
      else:
          return 'Neutral'

- def load_data(oni_data_path, typhoon_data_path):
-     oni_data = pd.read_csv(oni_data_path)
-     typhoon_data = pd.read_csv(typhoon_data_path, low_memory=False)
-
-     typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
-
-     typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
-
-     print(f"Typhoon data shape after cleaning: {typhoon_data.shape}")
-     print(f"Year range: {typhoon_data['ISO_TIME'].dt.year.min()} - {typhoon_data['ISO_TIME'].dt.year.max()}")
-
-     return oni_data, typhoon_data
-
- def preprocess_data(oni_data, typhoon_data):
-     typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
-     typhoon_data['WMO_PRES'] = pd.to_numeric(typhoon_data['WMO_PRES'], errors='coerce')
-     typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
-     typhoon_data['Year'] = typhoon_data['ISO_TIME'].dt.year
-     typhoon_data['Month'] = typhoon_data['ISO_TIME'].dt.month
-
-     monthly_max_wind_speed = typhoon_data.groupby(['Year', 'Month'])['USA_WIND'].max().reset_index()
-
-     oni_data_long = pd.melt(oni_data, id_vars=['Year'], var_name='Month', value_name='ONI')
-     oni_data_long['Month'] = oni_data_long['Month'].apply(lambda x: pd.to_datetime(x, format='%b').month)
-
-     merged_data = pd.merge(monthly_max_wind_speed, oni_data_long, on=['Year', 'Month'])
-
-     return merged_data
-
- def calculate_max_wind_min_pressure(typhoon_data):
-     max_wind_speed = typhoon_data['USA_WIND'].max()
-     min_pressure = typhoon_data['WMO_PRES'].min()
-     return max_wind_speed, min_pressure
-
- @functools.lru_cache(maxsize=None)
- def get_storm_data(storm_id):
-     return ibtracs.get_storm(storm_id)
-
  def filter_west_pacific_coordinates(lons, lats):
      mask = (100 <= lons) & (lons <= 180) & (0 <= lats) & (lats <= 40)
      return lons[mask], lats[mask]

- def polynomial_exp(x, a, b, c, d):
-     return a * x**2 + b * x + c + d * np.exp(x)
-
- def exponential(x, a, b, c):
-     return a * np.exp(b * x) + c
-
- def generate_cluster_equations(cluster_center):
-     X = cluster_center[:, 0]  # Longitudes
-     y = cluster_center[:, 1]  # Latitudes
-
-     x_min = X.min()
-     x_max = X.max()
-
-     equations = []
-
-     # Fourier Series (up to 4th order)
-     def fourier_series(x, a0, a1, b1, a2, b2, a3, b3, a4, b4):
-         return (a0 + a1*np.cos(x) + b1*np.sin(x) +
-                 a2*np.cos(2*x) + b2*np.sin(2*x) +
-                 a3*np.cos(3*x) + b3*np.sin(3*x) +
-                 a4*np.cos(4*x) + b4*np.sin(4*x))
-
-     # Normalize X to the range [0, 2π]
-     X_normalized = 2 * np.pi * (X - x_min) / (x_max - x_min)
-
-     params, _ = curve_fit(fourier_series, X_normalized, y)
-     a0, a1, b1, a2, b2, a3, b3, a4, b4 = params
-
-     # Create the equation string
-     fourier_eq = (f"y = {a0:.4f} + {a1:.4f}*cos(x) + {b1:.4f}*sin(x) + "
-                   f"{a2:.4f}*cos(2x) + {b2:.4f}*sin(2x) + "
-                   f"{a3:.4f}*cos(3x) + {b3:.4f}*sin(3x) + "
-                   f"{a4:.4f}*cos(4x) + {b4:.4f}*sin(4x)")
-
-     equations.append(("Fourier Series", fourier_eq))
-     equations.append(("X Range", f"x goes from 0 to {2*np.pi:.4f}"))
-     equations.append(("Longitude Range", f"Longitude goes from {x_min:.4f}°E to {x_max:.4f}°E"))
-
-     return equations, (x_min, x_max)
-
- # Classification standards
- atlantic_standard = {
-     'C5 Super Typhoon': {'wind_speed': 137, 'color': 'rgb(255, 0, 0)'},
-     'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'rgb(255, 63, 0)'},
-     'C3 Strong Typhoon': {'wind_speed': 96, 'color': 'rgb(255, 127, 0)'},
-     'C2 Typhoon': {'wind_speed': 83, 'color': 'rgb(255, 191, 0)'},
-     'C1 Typhoon': {'wind_speed': 64, 'color': 'rgb(255, 255, 0)'},
-     'Tropical Storm': {'wind_speed': 34, 'color': 'rgb(0, 255, 255)'},
-     'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'}
- }
-
- taiwan_standard = {
-     'Strong Typhoon': {'wind_speed': 51.0, 'color': 'rgb(255, 0, 0)'},      # >= 51.0 m/s
-     'Medium Typhoon': {'wind_speed': 33.7, 'color': 'rgb(255, 127, 0)'},    # 33.7-50.9 m/s
-     'Mild Typhoon': {'wind_speed': 17.2, 'color': 'rgb(255, 255, 0)'},      # 17.2-33.6 m/s
-     'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'} # < 17.2 m/s
- }

- def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
-     """
-     Categorize typhoon based on wind speed and chosen standard.
-     wind_speed is in knots.
-     """
      if standard == 'taiwan':
-         # Convert knots to m/s for Taiwan standard
          wind_speed_ms = wind_speed * 0.514444
-
          if wind_speed_ms >= 51.0:
              return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['color']
          elif wind_speed_ms >= 33.7:
@@ -575,7 +449,6 @@ def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
          else:
              return 'Tropical Depression', taiwan_standard['Tropical Depression']['color']
      else:
-         # Atlantic standard uses knots
          if wind_speed >= 137:
              return 'C5 Super Typhoon', atlantic_standard['C5 Super Typhoon']['color']
          elif wind_speed >= 113:
@@ -591,830 +464,80 @@ def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
          else:
              return 'Tropical Depression', atlantic_standard['Tropical Depression']['color']

- # Initialize data at startup
- def initialize_data():
-     global oni_df, ibtracs, oni_data, typhoon_data, oni_long, typhoon_max, merged_data, data, max_wind_speed, min_pressure
-
-     print(f"Using data path: {DATA_PATH}")
-     # Update ONI data before starting the application
-     update_oni_data()
-     oni_df = fetch_oni_data_from_csv(ONI_DATA_PATH)
-     ibtracs = load_ibtracs_data()
-
-     if os.path.exists(LOCAL_iBtrace_PATH):
-         convert_typhoondata(LOCAL_iBtrace_PATH, TYPHOON_DATA_PATH)
-
-     oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
-     oni_long = process_oni_data(oni_data)
-     typhoon_max = process_typhoon_data(typhoon_data)
-     merged_data = merge_data(oni_long, typhoon_max)
-     data = preprocess_data(oni_data, typhoon_data)
-     max_wind_speed, min_pressure = calculate_max_wind_min_pressure(typhoon_data)
-
-     # Schedule data updates
-     schedule.every().day.at("01:00").do(update_ibtracs_data)
-     schedule.every().day.at("00:00").do(lambda: update_oni_data() if should_update_oni() else None)
-
-     # Run the scheduler in a separate thread
-     scheduler_thread = threading.Thread(target=run_schedule)
-     scheduler_thread.daemon = True
-     scheduler_thread.start()
-
-     return oni_df, ibtracs, typhoon_data
-
- # Function to get available years from typhoon data
- def get_available_years():
-     if typhoon_data is None:
-         return []
-     years = typhoon_data['ISO_TIME'].dt.year.unique()
-     years = years[~np.isnan(years)]
-     years = sorted(years)
-     return years
-
- # Function to get available typhoons for a selected year
- # Function to get available typhoons for a selected year
-
- def create_typhoon_path_animation(year, typhoon_id, standard):
-     """Create animation for typhoon path using a similar approach to the Dash version"""
-     if not year or not typhoon_id:
-         empty_fig = go.Figure()
-         empty_fig.add_annotation(text="Please select a year and typhoon",
-                                  xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
-         return empty_fig
-
-     try:
-         # For debugging
-         print(f"Creating animation for year: {year}, typhoon: {typhoon_id}, standard: {standard}")
-
-         # If the input is a display name instead of an ID, extract the ID
-         if isinstance(typhoon_id, str) and "(" in typhoon_id and ")" in typhoon_id:
-             storm_id = typhoon_id.split("(")[-1].split(")")[0].strip()
-         else:
-             storm_id = typhoon_id
-
-         print(f"Using storm ID: {storm_id}")
-
-         storm = ibtracs.get_storm(storm_id)
-         if storm is None:
-             print(f"Storm not found with ID: {storm_id}")
-             empty_fig = go.Figure()
-             empty_fig.add_annotation(text=f"Storm not found with ID: {storm_id}",
-                                      xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
-             return empty_fig
-
-         fig = go.Figure()
-
-         # Add the full path
-         fig.add_trace(
-             go.Scattergeo(
-                 lon=storm.lon,
-                 lat=storm.lat,
-                 mode='lines',
-                 line=dict(width=2, color='gray'),
-                 name='Path',
-                 showlegend=False,
-             )
-         )
-
-         # Add the starting point
-         fig.add_trace(
-             go.Scattergeo(
-                 lon=[storm.lon[0]],
-                 lat=[storm.lat[0]],
-                 mode='markers',
-                 marker=dict(size=10, color='green', symbol='star'),
-                 name='Starting Point',
-                 text=storm.time[0].strftime('%Y-%m-%d %H:%M'),
-                 hoverinfo='text+name',
-             )
-         )
-
-         # Create frames for animation
-         frames = []
-         for i in range(len(storm.time)):
-             category, color = categorize_typhoon_by_standard(storm.vmax[i], standard)
-
-             # Get additional data if available
-             r34_ne = storm.dict['USA_R34_NE'][i] if 'USA_R34_NE' in storm.dict else None
-             r34_se = storm.dict['USA_R34_SE'][i] if 'USA_R34_SE' in storm.dict else None
-             r34_sw = storm.dict['USA_R34_SW'][i] if 'USA_R34_SW' in storm.dict else None
-             r34_nw = storm.dict['USA_R34_NW'][i] if 'USA_R34_NW' in storm.dict else None
-             rmw = storm.dict['USA_RMW'][i] if 'USA_RMW' in storm.dict else None
-             eye_diameter = storm.dict['USA_EYE'][i] if 'USA_EYE' in storm.dict else None
-
-             radius_info = f"R34: NE={r34_ne}, SE={r34_se}, SW={r34_sw}, NW={r34_nw}<br>"
-             radius_info += f"RMW: {rmw}<br>"
-             radius_info += f"Eye Diameter: {eye_diameter}"
-
-             frame_data = [
-                 go.Scattergeo(
-                     lon=storm.lon[:i+1],
-                     lat=storm.lat[:i+1],
-                     mode='lines',
-                     line=dict(width=2, color='blue'),
-                     name='Path Traveled',
-                     showlegend=False,
-                 ),
-                 go.Scattergeo(
-                     lon=[storm.lon[i]],
-                     lat=[storm.lat[i]],
-                     mode='markers+text',
-                     marker=dict(size=10, color=color, symbol='star'),
-                     text=category,
-                     textposition="top center",
-                     textfont=dict(size=12, color=color),
-                     name='Current Location',
-                     hovertext=f"{storm.time[i].strftime('%Y-%m-%d %H:%M')}<br>"
-                               f"Category: {category}<br>"
-                               f"Wind Speed: {storm.vmax[i]:.1f} m/s<br>"
-                               f"{radius_info}",
-                     hoverinfo='text',
-                 ),
-             ]
-             frames.append(go.Frame(data=frame_data, name=f"frame{i}"))
-
-         fig.frames = frames
-
-         # Update layout with animation controls
-         fig.update_layout(
-             title=f"{year} Year {storm.name} Typhoon Path",
-             showlegend=False,
-             geo=dict(
-                 projection_type='natural earth',
-                 showland=True,
-                 landcolor='rgb(243, 243, 243)',
-                 countrycolor='rgb(204, 204, 204)',
-                 coastlinecolor='rgb(100, 100, 100)',
-                 showocean=True,
-                 oceancolor='rgb(230, 250, 255)',
-             ),
-             updatemenus=[{
-                 "buttons": [
-                     {
-                         "args": [None, {"frame": {"duration": 100, "redraw": True},
-                                         "fromcurrent": True,
-                                         "transition": {"duration": 0}}],
-                         "label": "Play",
-                         "method": "animate"
-                     },
-                     {
-                         "args": [[None], {"frame": {"duration": 0, "redraw": True},
-                                           "mode": "immediate",
-                                           "transition": {"duration": 0}}],
-                         "label": "Pause",
-                         "method": "animate"
-                     }
-                 ],
-                 "direction": "left",
-                 "pad": {"r": 10, "t": 87},
-                 "showactive": False,
-                 "type": "buttons",
-                 "x": 0.1,
-                 "xanchor": "right",
-                 "y": 0,
-                 "yanchor": "top"
-             }],
-             sliders=[{
-                 "active": 0,
-                 "yanchor": "top",
-                 "xanchor": "left",
-                 "currentvalue": {
-                     "font": {"size": 20},
-                     "prefix": "Time: ",
-                     "visible": True,
-                     "xanchor": "right"
-                 },
-                 "transition": {"duration": 100, "easing": "cubic-in-out"},
-                 "pad": {"b": 10, "t": 50},
-                 "len": 0.9,
-                 "x": 0.1,
-                 "y": 0,
-                 "steps": [
-                     {
-                         "args": [[f"frame{k}"],
-                                  {"frame": {"duration": 100, "redraw": True},
-                                   "mode": "immediate",
-                                   "transition": {"duration": 0}}],
-                         "label": storm.time[k].strftime('%Y-%m-%d %H:%M'),
-                         "method": "animate"
-                     }
-                     for k in range(len(storm.time))
-                 ]
-             }]
-         )
-
-         return fig
-     except Exception as e:
-         print(f"Error creating typhoon path animation: {str(e)}")
-         error_fig = go.Figure()
-         error_fig.add_annotation(text=f"Error: {str(e)}",
-                                  xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
-         return error_fig
-
- # Function to analyze typhoon tracks
- # Function to analyze typhoon tracks
- def analyze_typhoon_tracks(start_year, start_month, end_year, end_month, enso_selection, typhoon_search=""):
-     start_date = datetime(int(start_year), int(start_month), 1)
-     end_date = datetime(int(end_year), int(end_month), 28)
-
-     # Create typhoon tracks plot
-     fig_tracks = go.Figure()
-
-     # Map Gradio dropdown values to the values used in the original code
-     enso_map = {
-         "All Years": "all",
-         "El Niño Years": "el_nino",
-         "La Niña Years": "la_nina",
-         "Neutral Years": "neutral"
-     }
-     enso_value = enso_map[enso_selection]
-
-     try:
-         processed_storms = 0
-         for year in range(int(start_year), int(end_year) + 1):
-             if year not in ibtracs.data.keys():
-                 continue
-
-             season = ibtracs.get_season(year)
-             for storm_id in season.summary()['id']:
-                 try:
-                     storm = get_storm_data(storm_id)
-                     storm_dates = storm.time
-
-                     if any(start_date <= date <= end_date for date in storm_dates):
-                         storm_date_str = storm_dates[0].strftime('%Y-%b')
-                         storm_oni = None
-
-                         # Find the ONI value - handle case where date might not be in index
-                         if storm_date_str in oni_df.index:
-                             storm_oni = oni_df.loc[storm_date_str]['ONI']
-                             if isinstance(storm_oni, pd.Series):
-                                 storm_oni = storm_oni.iloc[0]
-                         else:
-                             # Try to find closest date
-                             closest_dates = oni_df.index[oni_df.index.year == storm_dates[0].year]
-                             if len(closest_dates) > 0:
-                                 closest_date = min(closest_dates, key=lambda x: abs((x - storm_dates[0].to_pydatetime()).total_seconds()))
-                                 storm_oni = oni_df.loc[closest_date]['ONI']
-                                 if isinstance(storm_oni, pd.Series):
-                                     storm_oni = storm_oni.iloc[0]
-
-                         if storm_oni is not None:
-                             phase = classify_enso_phases(storm_oni)
-
-                             if (enso_value == 'all' or
-                                 (enso_value == 'el_nino' and phase == 'El Nino') or
-                                 (enso_value == 'la_nina' and phase == 'La Nina') or
-                                 (enso_value == 'neutral' and phase == 'Neutral')):
-
-                                 color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}[phase]
-
-                                 # Highlight searched typhoon
-                                 if typhoon_search and typhoon_search.lower() in storm.name.lower():
-                                     line_width = 5
-                                     line_color = 'yellow'
-                                 else:
-                                     line_width = 2
-                                     line_color = color
-
-                                 fig_tracks.add_trace(go.Scattergeo(
-                                     lon=storm.lon,
-                                     lat=storm.lat,
-                                     mode='lines',
-                                     name=storm.name,
-                                     text=f'{storm.name} ({year})',
-                                     hoverinfo='text',
-                                     line=dict(width=line_width, color=line_color)
-                                 ))
-                                 processed_storms += 1
-                 except Exception as e:
-                     print(f"Error processing storm {storm_id}: {e}")
-                     continue
-
-         print(f"Processed {processed_storms} storms for track display.")
-
-         fig_tracks.update_layout(
-             title=f'Typhoon Tracks from {start_year}-{start_month} to {end_year}-{end_month}',
-             geo=dict(
-                 projection_type='natural earth',
-                 showland=True,
-                 coastlinecolor='rgb(100, 100, 100)',
-                 countrycolor='rgb(204, 204, 204)',
-                 showocean=True,
-                 oceancolor='rgb(230, 250, 255)',
-             )
-         )
-
-         # Calculate statistics for this period
-         filtered_data = merged_data[
-             (merged_data['Year'] >= int(start_year)) &
-             (merged_data['Year'] <= int(end_year)) &
-             (merged_data['Month'].astype(int) >= int(start_month)) &
-             (merged_data['Month'].astype(int) <= int(end_month))
-         ]
-
-         max_wind = filtered_data['USA_WIND'].max() if not filtered_data.empty else 0
-         min_press = filtered_data['USA_PRES'].min() if not filtered_data.empty else 0
-
-         stats_text = f"Maximum Wind Speed: {max_wind:.2f} knots\nMinimum Pressure: {min_press:.2f} hPa\nTotal Storms: {processed_storms}"
-
-         # Create wind scatter plot
-         wind_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
-                                       hover_data=['NAME', 'Year', 'Category'],
-                                       title='Wind Speed vs ONI',
-                                       labels={'ONI': 'ONI Value', 'USA_WIND': 'Maximum Wind Speed (knots)'},
-                                       color_discrete_map=color_map)
-
-         # Create pressure scatter plot
-         pressure_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
-                                           hover_data=['NAME', 'Year', 'Category'],
-                                           title='Pressure vs ONI',
-                                           labels={'ONI': 'ONI Value', 'USA_PRES': 'Minimum Pressure (hPa)'},
-                                           color_discrete_map=color_map)
-
-         return fig_tracks, wind_oni_scatter, pressure_oni_scatter, stats_text
-     except Exception as e:
-         error_fig = go.Figure()
-         error_fig.add_annotation(text=f"Error: {str(e)}", xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
-         return error_fig, error_fig, error_fig, f"Error analyzing typhoon tracks: {str(e)}"
-
- # Function to run cluster analysis
- def run_cluster_analysis(start_year, start_month, end_year, end_month, n_clusters, enso_selection, analysis_type):
-     start_date = datetime(int(start_year), int(start_month), 1)
-     end_date = datetime(int(end_year), int(end_month), 28)
-
-     # Map Gradio dropdown values to the values used in the original code
-     enso_map = {
-         "All Years": "all",
-         "El Niño Years": "el_nino",
-         "La Niña Years": "la_nina",
-         "Neutral Years": "neutral"
-     }
-     enso_value = enso_map[enso_selection]
-
-     fig_routes = go.Figure()
-
-     try:
-         # Clustering analysis
-         west_pacific_storms = []
-         for year in range(int(start_year), int(end_year) + 1):
-             if year not in ibtracs.data.keys():
-                 continue
-
-             season = ibtracs.get_season(year)
-             for storm_id in season.summary()['id']:
-                 storm = get_storm_data(storm_id)
-                 storm_date = storm.time[0]
-
-                 # Try to find the ONI value for this storm date
-                 date_str = storm_date.strftime('%Y-%b')
-                 if date_str in oni_df.index:
-                     storm_oni = oni_df.loc[date_str]['ONI']
-                     if isinstance(storm_oni, pd.Series):
-                         storm_oni = storm_oni.iloc[0]
-                     storm_phase = classify_enso_phases(storm_oni)
-
-                     if enso_value == 'all' or \
-                        (enso_value == 'el_nino' and storm_phase == 'El Nino') or \
-                        (enso_value == 'la_nina' and storm_phase == 'La Nina') or \
-                        (enso_value == 'neutral' and storm_phase == 'Neutral'):
-                         lons, lats = filter_west_pacific_coordinates(np.array(storm.lon), np.array(storm.lat))
-                         if len(lons) > 1:  # Ensure the storm has a valid path in West Pacific
-                             west_pacific_storms.append((lons, lats))
-
-         if not west_pacific_storms:
-             return None, "No storms found matching the criteria"
-
-         max_length = max(len(storm[0]) for storm in west_pacific_storms)
-         standardized_routes = []
-
-         for lons, lats in west_pacific_storms:
-             if len(lons) < 2:  # Skip if not enough points
-                 continue
-             t = np.linspace(0, 1, len(lons))
-             t_new = np.linspace(0, 1, max_length)
-             lon_interp = interp1d(t, lons, kind='linear')(t_new)
-             lat_interp = interp1d(t, lats, kind='linear')(t_new)
-             route_vector = np.column_stack((lon_interp, lat_interp)).flatten()
-             standardized_routes.append(route_vector)
-
-         if not standardized_routes:
-             return None, "Unable to create standardized routes"
-
-         kmeans = KMeans(n_clusters=int(n_clusters), random_state=42, n_init=10)
-         clusters = kmeans.fit_predict(standardized_routes)
-
-         # Count the number of typhoons in each cluster
-         cluster_counts = np.bincount(clusters)
-
-         # Draw all routes (with lighter color)
-         if analysis_type == "Show Routes":
-             for lons, lats in west_pacific_storms:
-                 fig_routes.add_trace(go.Scattergeo(
-                     lon=lons, lat=lats,
-                     mode='lines',
-                     line=dict(width=1, color='lightgray'),
-                     showlegend=False,
-                     hoverinfo='none'
-                 ))
-
-         equations_output = ""
-         # Draw cluster centroids
-         if analysis_type == "Show Clusters" or analysis_type == "Fourier Series":
-             for i in range(int(n_clusters)):
-                 cluster_center = kmeans.cluster_centers_[i].reshape(-1, 2)
-
-                 fig_routes.add_trace(go.Scattergeo(
-                     lon=cluster_center[:, 0],
-                     lat=cluster_center[:, 1],
-                     mode='lines',
-                     name=f'Cluster {i+1} (n={cluster_counts[i]})',
-                     line=dict(width=3)
-                 ))
-
-                 if analysis_type == "Fourier Series":
-                     cluster_equations, (lon_min, lon_max) = generate_cluster_equations(cluster_center)
-
-                     equations_output += f"\n--- Cluster {i+1} (Typhoons: {cluster_counts[i]}) ---\n"
-                     for name, eq in cluster_equations:
-                         equations_output += f"{name}: {eq}\n"
-
-                     equations_output += "\nTo use in GeoGebra:\n"
-                     equations_output += f"1. Set x-axis from 0 to {2*np.pi:.4f}\n"
-                     equations_output += "2. Use the equation as is\n"
-                     equations_output += f"3. To convert x back to longitude: lon = {lon_min:.4f} + x * {(lon_max - lon_min) / (2*np.pi):.4f}\n\n"
-
-         enso_phase_text = {
-             'all': 'All Years',
-             'el_nino': 'El Niño Years',
-             'la_nina': 'La Niña Years',
-             'neutral': 'Neutral Years'
-         }
-
-         fig_routes.update_layout(
-             title=f'Typhoon Routes Clustering in West Pacific ({start_year}-{end_year}) - {enso_phase_text[enso_value]}',
-             geo=dict(
-                 projection_type='mercator',
-                 showland=True,
-                 landcolor='rgb(243, 243, 243)',
-                 countrycolor='rgb(204, 204, 204)',
-                 coastlinecolor='rgb(100, 100, 100)',
-                 showocean=True,
-                 oceancolor='rgb(230, 250, 255)',
-                 lataxis={'range': [0, 40]},
-                 lonaxis={'range': [100, 180]},
-                 center={'lat': 20, 'lon': 140},
-             ),
-             legend_title='Clusters'
-         )
-
-         return fig_routes, equations_output
-     except Exception as e:
-         error_fig = go.Figure()
-         error_fig.add_annotation(text=f"Error: {str(e)}", xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
-         return error_fig, f"Error in cluster analysis: {str(e)}"
-
- # Function to perform logistic regression
- def perform_logistic_regression(start_year, start_month, end_year, end_month, regression_type):
-     start_date = datetime(int(start_year), int(start_month), 1)
-     end_date = datetime(int(end_year), int(end_month), 28)
-
-     try:
-         filtered_data = merged_data[
-             (merged_data['ISO_TIME'] >= start_date) &
-             (merged_data['ISO_TIME'] <= end_date)
-         ]
-
-         if regression_type == "Wind Speed":
-             filtered_data['severe_typhoon'] = (filtered_data['USA_WIND'] >= 64).astype(int)  # 64 knots threshold for severe typhoons
-             X = sm.add_constant(filtered_data['ONI'])
-             y = filtered_data['severe_typhoon']
-             model = sm.Logit(y, X).fit()
-
-             beta_1 = model.params['ONI']
-             exp_beta_1 = np.exp(beta_1)
-             p_value = model.pvalues['ONI']
-
-             el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
-             la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
-             neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
-
-             el_nino_severe = el_nino_data['severe_typhoon'].mean() if not el_nino_data.empty else 0
-             la_nina_severe = la_nina_data['severe_typhoon'].mean() if not la_nina_data.empty else 0
-             neutral_severe = neutral_data['severe_typhoon'].mean() if not neutral_data.empty else 0
-
-             result = f"""
-             # Wind Speed Logistic Regression Results
-
-             β1 (ONI coefficient): {beta_1:.4f}
-             exp(β1) (Odds Ratio): {exp_beta_1:.4f}
-             P-value: {p_value:.4f}
-
-             Interpretation:
-             - For each unit increase in ONI, the odds of a severe typhoon are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
-             - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
-
-             Proportion of severe typhoons:
-             - El Niño conditions: {el_nino_severe:.2%}
-             - La Niña conditions: {la_nina_severe:.2%}
-             - Neutral conditions: {neutral_severe:.2%}
-             """
-
-         elif regression_type == "Pressure":
-             filtered_data['intense_typhoon'] = (filtered_data['USA_PRES'] <= 950).astype(int)  # 950 hPa threshold for intense typhoons
-             X = sm.add_constant(filtered_data['ONI'])
-             y = filtered_data['intense_typhoon']
-             model = sm.Logit(y, X).fit()
-
-             beta_1 = model.params['ONI']
-             exp_beta_1 = np.exp(beta_1)
-             p_value = model.pvalues['ONI']
-
-             el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
-             la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
-             neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
-
-             el_nino_intense = el_nino_data['intense_typhoon'].mean() if not el_nino_data.empty else 0
-             la_nina_intense = la_nina_data['intense_typhoon'].mean() if not la_nina_data.empty else 0
-             neutral_intense = neutral_data['intense_typhoon'].mean() if not neutral_data.empty else 0
-
-             result = f"""
-             # Pressure Logistic Regression Results
-
-             β1 (ONI coefficient): {beta_1:.4f}
-             exp(β1) (Odds Ratio): {exp_beta_1:.4f}
-             P-value: {p_value:.4f}
-
-             Interpretation:
-             - For each unit increase in ONI, the odds of an intense typhoon (pressure <= 950 hPa) are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
-             - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
-
-             Proportion of intense typhoons:
-             - El Niño conditions: {el_nino_intense:.2%}
-             - La Niña conditions: {la_nina_intense:.2%}
-             - Neutral conditions: {neutral_intense:.2%}
-             """
-
-         elif regression_type == "Longitude":
-             filtered_data = filtered_data.dropna(subset=['LON'])
-
-             if len(filtered_data) == 0:
-                 return "Insufficient data for longitude analysis"
-
-             filtered_data['western_typhoon'] = (filtered_data['LON'] <= 140).astype(int)  # 140°E as threshold for western typhoons
-             X = sm.add_constant(filtered_data['ONI'])
-             y = filtered_data['western_typhoon']
-             model = sm.Logit(y, X).fit()
-
-             beta_1 = model.params['ONI']
-             exp_beta_1 = np.exp(beta_1)
-             p_value = model.pvalues['ONI']
-
-             el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
-             la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
-             neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
-
-             el_nino_western = el_nino_data['western_typhoon'].mean() if not el_nino_data.empty else 0
-             la_nina_western = la_nina_data['western_typhoon'].mean() if not la_nina_data.empty else 0
-             neutral_western = neutral_data['western_typhoon'].mean() if not neutral_data.empty else 0
-
-             result = f"""
-             # Longitude Logistic Regression Results
-
-             β1 (ONI coefficient): {beta_1:.4f}
-             exp(β1) (Odds Ratio): {exp_beta_1:.4f}
-             P-value: {p_value:.4f}
-
-             Interpretation:
-             - For each unit increase in ONI, the odds of a typhoon forming west of 140°E are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
-             - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
-
-             Proportion of typhoons forming west of 140°E:
-             - El Niño conditions: {el_nino_western:.2%}
-             - La Niña conditions: {la_nina_western:.2%}
-             - Neutral conditions: {neutral_western:.2%}
-             """
-
-         return result
-     except Exception as e:
-         return f"Error performing logistic regression: {str(e)}"
-
- def get_typhoons_for_year(year):
-     if not year or ibtracs is None:
-         return []
-
-     try:
-         year = int(year)
-         if year not in ibtracs.data:
-             return []
-
-         season = ibtracs.get_season(year)
-         storm_summary = season.summary()
-
-         typhoon_options = []
-         for i in range(storm_summary['season_storms']):
-             try:
-                 storm_id = storm_summary['id'][i]
-                 storm_name = storm_summary['name'][i]
-                 # Use storm name as the display name, but return the ID as the value
-                 display_name = f"{storm_name} ({storm_id})"
-                 typhoon_options.append((display_name, storm_id))
-             except Exception as e:
-                 print(f"Error retrieving typhoon info: {e}")
-                 continue
-
-         return typhoon_options
-     except Exception as e:
-         print(f"Error getting typhoons for year {year}: {e}")
-         return []
-
- # Define Gradio interface
- def create_interface():
-     # Initialize data first
-     global oni_df, ibtracs, oni_data, typhoon_data, oni_long, typhoon_max, merged_data
-     oni_df, ibtracs, typhoon_data = initialize_data()
-
-     # Define interface tabs
-     with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
-         gr.Markdown("# Typhoon Analysis Dashboard")
-
-         with gr.Tab("Typhoon Tracks Analysis"):
-             with gr.Row():
-                 with gr.Column():
-                     start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
-                     start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
-                 with gr.Column():
-                     end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
-                     end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
-
-             enso_dropdown = gr.Dropdown(
-                 choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"],
-                 value="All Years",
-                 label="ENSO Phase"
-             )
-
-             typhoon_search = gr.Textbox(label="Search Typhoon Name")
-
-             analyze_button = gr.Button("Analyze Tracks")
-
-             with gr.Row():
-                 tracks_plot = gr.Plot(label="Typhoon Tracks")
-                 stats_text = gr.Textbox(label="Statistics", lines=4)
-
-             with gr.Row():
-                 wind_plot = gr.Plot(label="Wind Speed vs ONI")
-                 pressure_plot = gr.Plot(label="Pressure vs ONI")
-
-             analyze_button.click(
-                 analyze_typhoon_tracks,
-                 inputs=[start_year, start_month, end_year, end_month, enso_dropdown, typhoon_search],
-                 outputs=[tracks_plot, wind_plot, pressure_plot, stats_text]
-             )
-
-         with gr.Tab("Clustering Analysis"):
-             with gr.Row():
-                 with gr.Column():
-                     cluster_start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
-                     cluster_start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
-                 with gr.Column():
-                     cluster_end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
-                     cluster_end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
-
-             with gr.Row():
-                 n_clusters = gr.Number(value=5, label="Number of Clusters", minimum=1, maximum=20, step=1)
-                 cluster_enso_dropdown = gr.Dropdown(
-                     choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"],
-                     value="All Years",
-                     label="ENSO Phase"
-                 )
-
-             analysis_type = gr.Radio(
-                 choices=["Show Routes", "Show Clusters", "Fourier Series"],
-                 value="Show Clusters",
-                 label="Analysis Type"
-             )
-
-             cluster_button = gr.Button("Run Cluster Analysis")
-
-             cluster_plot = gr.Plot(label="Typhoon Routes Clustering")
-             equation_text = gr.Textbox(label="Cluster Equations", lines=15)
-
-             cluster_button.click(
-                 run_cluster_analysis,
-                 inputs=[
-                     cluster_start_year, cluster_start_month, cluster_end_year,
-                     cluster_end_month, n_clusters, cluster_enso_dropdown, analysis_type
-                 ],
-                 outputs=[cluster_plot, equation_text]
-             )
-
-         with gr.Tab("Regression Analysis"):
-             with gr.Row():
-                 with gr.Column():
-                     reg_start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
-                     reg_start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
-                 with gr.Column():
-                     reg_end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
-                     reg_end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
-
-             regression_type = gr.Radio(
-                 choices=["Wind Speed", "Pressure", "Longitude"],
-                 value="Wind Speed",
-                 label="Regression Type"
-             )
-
-             regression_button = gr.Button("Perform Logistic Regression")
-
-             regression_results = gr.Textbox(label="Regression Results", lines=15)
-
-             regression_button.click(
-                 perform_logistic_regression,
-                 inputs=[reg_start_year, reg_start_month, reg_end_year, reg_end_month, regression_type],
-                 outputs=regression_results
-             )
-
-         with gr.Tab("Typhoon Path Animation"):
-             with gr.Row():
-                 # Use default values first, we'll populate after data loads
-                 year_dropdown = gr.Dropdown(
-                     choices=[2020, 2021, 2022, 2023, 2024],
-                     value=2023,
-                     label="Year"
-                 )
-
-                 typhoon_dropdown = gr.Dropdown(
-                     choices=["Select a year first"],
-                     label="Typhoon"
-                 )
-
-                 standard_dropdown = gr.Radio(
-                     choices=["atlantic", "taiwan"],
-                     value="atlantic",
-                     label="Classification Standard"
-                 )
-
-             # Function to populate year dropdown
-             def populate_years():
-                 if ibtracs is None:
-                     return [2020, 2021, 2022, 2023, 2024]
-
-                 available_years = []
-                 for year in range(1950, 2025):
-                     if year in ibtracs.data:
-                         available_years.append(year)
-
-                 return sorted(available_years, reverse=True)
-
-             # Function to update typhoon dropdown when year changes
-             def update_typhoons(year):
-                 if not year:
-                     return ["No typhoons available"]
-
-                 typhoons = get_typhoons_for_year(int(year))
-                 if not typhoons:
-                     return ["No typhoons available for this year"]
-
-                 return [name for name, _ in typhoons]
-
-             # Update typhoon dropdown when year changes
-             year_dropdown.change(
-                 update_typhoons,
-                 inputs=year_dropdown,
-                 outputs=typhoon_dropdown
-             )
-
-             # Add a "Generate" button
-             generate_btn = gr.Button("Generate Typhoon Path")
-
-             # Display area for the typhoon animation
-             path_plot = gr.Plot(label="Typhoon Path")
-
-             generate_btn.click(
-                 create_typhoon_path_animation,
-                 inputs=[year_dropdown, typhoon_dropdown, standard_dropdown],
-                 outputs=path_plot
-             )
-
-             # Add a "Load Years" button
-             gr.Button("Load Available Years").click(
-                 populate_years,
-                 inputs=None,
-                 outputs=year_dropdown
-             )
-
- # Run the app
- if __name__ == "__main__":
-     # For Hugging Face, use a simpler version without threading
-     DATA_PATH = os.path.dirname(os.path.abspath(__file__))
-
-     print(f"Using data path: {DATA_PATH}")
-
-     ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
-     TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
-     LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
-     CACHE_FILE = os.path.join(DATA_PATH, 'ibtracs_cache.pkl')
-
-     # Create and launch the Gradio interface
-     demo = create_interface()
-     demo.launch()  # No parameters for Hugging Face Spaces
 
1
  import gradio as gr
2
  import plotly.graph_objects as go
3
  import plotly.express as px
 
 
4
  import pandas as pd
5
  import numpy as np
6
+ from datetime import datetime
 
 
 
 
 
 
7
  from scipy import stats
 
8
  from sklearn.linear_model import LinearRegression
9
  from sklearn.cluster import KMeans
10
  from scipy.interpolate import interp1d
11
  from fractions import Fraction
 
 
12
  import statsmodels.api as sm
13
+ import tropycal.tracks as tracks
14
+ import os
15
+ import pickle
16
  import requests
 
17
  import tempfile
 
 
18
  import shutil
19
  import filecmp
20
+ import csv
21
+ from collections import defaultdict
22
+ import argparse
23
 
24
+ # Command-line argument parsing
25
  parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
26
  parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
27
  args = parser.parse_args()
 
 
28
  DATA_PATH = args.data_path
29
 
30
+ # File paths
31
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
32
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
33
+ LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
34
  iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.WP.list.v04r01.csv'
 
35
  CACHE_FILE = 'ibtracs_cache.pkl'
36
  CACHE_EXPIRY_DAYS = 1
 
37
 
38
+ # Color map for categories
39
+ color_map = {
40
+ 'C5 Super Typhoon': 'rgb(255, 0, 0)',
41
+ 'C4 Very Strong Typhoon': 'rgb(255, 63, 0)',
42
+ 'C3 Strong Typhoon': 'rgb(255, 127, 0)',
43
+ 'C2 Typhoon': 'rgb(255, 191, 0)',
44
+ 'C1 Typhoon': 'rgb(255, 255, 0)',
45
+ 'Tropical Storm': 'rgb(0, 255, 255)',
46
+ 'Tropical Depression': 'rgb(173, 216, 230)'
47
+ }
48
 
49
+ # Classification standards
50
+ atlantic_standard = {
51
+ 'C5 Super Typhoon': {'wind_speed': 137, 'color': 'rgb(255, 0, 0)'},
52
+ 'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'rgb(255, 63, 0)'},
53
+ 'C3 Strong Typhoon': {'wind_speed': 96, 'color': 'rgb(255, 127, 0)'},
54
+ 'C2 Typhoon': {'wind_speed': 83, 'color': 'rgb(255, 191, 0)'},
55
+ 'C1 Typhoon': {'wind_speed': 64, 'color': 'rgb(255, 255, 0)'},
56
+ 'Tropical Storm': {'wind_speed': 34, 'color': 'rgb(0, 255, 255)'},
57
+ 'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'}
58
+ }
 
 
59
 
60
+ taiwan_standard = {
61
+ 'Strong Typhoon': {'wind_speed': 51.0, 'color': 'rgb(255, 0, 0)'},
62
+ 'Medium Typhoon': {'wind_speed': 33.7, 'color': 'rgb(255, 127, 0)'},
63
+ 'Mild Typhoon': {'wind_speed': 17.2, 'color': 'rgb(255, 255, 0)'},
64
+ 'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'}
 
 
 
65
  }
66
 
67
+ # Data loading and processing functions (unchanged from Dash)
  def convert_typhoondata(input_file, output_file):
      with open(input_file, 'r') as infile:
          next(infile)
          next(infile)
          reader = csv.reader(infile)
          sid_data = defaultdict(list)
          for row in reader:
+             if not row:
                  continue
              sid = row[0]
              iso_time = row[6]
              sid_data[sid].append((row, iso_time))
      with open(output_file, 'w', newline='') as outfile:
          fieldnames = ['SID', 'ISO_TIME', 'LAT', 'LON', 'SEASON', 'NAME', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES', 'START_DATE', 'END_DATE']
          writer = csv.DictWriter(outfile, fieldnames=fieldnames)
          writer.writeheader()
          for sid, data in sid_data.items():
              start_date = min(data, key=lambda x: x[1])[1]
              end_date = max(data, key=lambda x: x[1])[1]
              for row, iso_time in data:
                  writer.writerow({
+                     'SID': row[0], 'ISO_TIME': iso_time, 'LAT': row[8], 'LON': row[9], 'SEASON': row[1], 'NAME': row[5],
+                     'WMO_WIND': row[10].strip() or ' ', 'WMO_PRES': row[11].strip() or ' ',
+                     'USA_WIND': row[23].strip() or ' ', 'USA_PRES': row[24].strip() or ' ',
+                     'START_DATE': start_date, 'END_DATE': end_date
                  })
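+ # Note: rows are buffered per storm id (SID) first so that every output row can
+ # carry the storm's overall START_DATE/END_DATE; min()/max() on the ISO_TIME
+ # strings is safe because ISO-8601 timestamps sort lexicographically.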
  def download_oni_file(url, filename):
      try:
          response = requests.get(url)
+         response.raise_for_status()
          with open(filename, 'wb') as f:
              f.write(response.content)
          return True
+     except requests.RequestException:
          return False

  def convert_oni_ascii_to_csv(input_file, output_file):
      data = defaultdict(lambda: [''] * 12)
+     season_to_month = {'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5, 'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11}
+     with open(input_file, 'r') as f:
+         lines = f.readlines()[1:]
+     for line in lines:
+         parts = line.split()
+         if len(parts) >= 4:
+             season, year, anom = parts[0], parts[1], parts[-1]
+             if season in season_to_month:
+                 month = season_to_month[season]
+                 if season == 'DJF':
+                     year = str(int(year) - 1)
+                 data[year][month-1] = anom
+     with open(output_file, 'w', newline='') as f:
+         writer = csv.writer(f)
+         writer.writerow(['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
+         for year in sorted(data.keys()):
+             writer.writerow([year] + data[year])
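+ # Each ONI row is a 3-month running-mean season keyed here to its first month,
+ # so DJF is shifted back one year: a line like "DJF  2024  ...  -0.5" fills
+ # data['2023'][11], i.e. December 2023, with '-0.5'.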
  def update_oni_data():
      url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
      temp_file = os.path.join(DATA_PATH, "temp_oni.ascii.txt")
      input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
      output_file = ONI_DATA_PATH
      if download_oni_file(url, temp_file):
          if not os.path.exists(input_file) or not filecmp.cmp(temp_file, input_file, shallow=False):
              os.replace(temp_file, input_file)
              convert_oni_ascii_to_csv(input_file, output_file)
          else:
+             os.remove(temp_file)
  def load_ibtracs_data():
+     if os.path.exists(CACHE_FILE) and (datetime.now() - datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))).days < CACHE_EXPIRY_DAYS:
+         with open(CACHE_FILE, 'rb') as f:
+             return pickle.load(f)
      if os.path.exists(LOCAL_iBtrace_PATH):
          ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
      else:
          response = requests.get(iBtrace_uri)
          response.raise_for_status()
          with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
              temp_file.write(response.text)
              temp_file_path = temp_file.name
          shutil.move(temp_file_path, LOCAL_iBtrace_PATH)
          ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
+     with open(CACHE_FILE, 'wb') as f:
+         pickle.dump(ibtracs, f)
+     return ibtracs
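+ # With CACHE_EXPIRY_DAYS = 1 the pickled TrackDataset is reused for a day
+ # (judged by the cache file's mtime), so restarts skip the slow IBTrACS parse.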
  def process_oni_data(oni_data):
      oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
+     month_map = {'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'}
+     oni_long['Month'] = oni_long['Month'].map(month_map)
      oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str) + '-' + oni_long['Month'] + '-01')
      oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
      return oni_long
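+ # Example: a wide row (Year=2023, Dec='-0.5') melts into a long row with
+ # Month='12', Date=2023-12-01 and ONI=-0.5; (Year, Month) is the key merge_data joins on.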
  def process_typhoon_data(typhoon_data):
      typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
      typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
      typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
      typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
      typhoon_max = typhoon_data.groupby('SID').agg({
+         'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first', 'LAT': 'first', 'LON': 'first'
      }).reset_index()
      typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
      typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
      typhoon_max['Category'] = typhoon_max['USA_WIND'].apply(categorize_typhoon)
      return typhoon_max
  def merge_data(oni_long, typhoon_max):
      return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])
+ def categorize_typhoon(wind_speed):
+     # USA_WIND is already in knots, so compare directly against the same
+     # Saffir-Simpson thresholds used in atlantic_standard.
+     if wind_speed >= 137:
+         return 'C5 Super Typhoon'
+     elif wind_speed >= 113:
+         return 'C4 Very Strong Typhoon'
+     elif wind_speed >= 96:
+         return 'C3 Strong Typhoon'
+     elif wind_speed >= 83:
+         return 'C2 Typhoon'
+     elif wind_speed >= 64:
+         return 'C1 Typhoon'
+     elif wind_speed >= 34:
+         return 'Tropical Storm'
+     else:
+         return 'Tropical Depression'
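+ # Example: categorize_typhoon(150) -> 'C5 Super Typhoon'; categorize_typhoon(40) -> 'Tropical Storm'.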
  def classify_enso_phases(oni_value):
      if isinstance(oni_value, pd.Series):
          oni_value = oni_value.iloc[0]
      if oni_value >= 0.5:
          return 'El Nino'
      elif oni_value <= -0.5:
          return 'La Nina'
      else:
          return 'Neutral'
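+ # Example: classify_enso_phases(0.7) -> 'El Nino', classify_enso_phases(-0.2) -> 'Neutral';
+ # the ±0.5 cutoffs match the ONI thresholds used in the logistic regressions below.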
  def filter_west_pacific_coordinates(lons, lats):
      mask = (100 <= lons) & (lons <= 180) & (0 <= lats) & (lats <= 40)
      return lons[mask], lats[mask]
+ def get_storm_data(storm_id):
+     return ibtracs.get_storm(storm_id)
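+ # Thin wrapper so every storm lookup goes through one place. A returned tropycal
+ # Storm exposes the .lon, .lat, .time and .vmax arrays used below (the id format
+ # shown elsewhere, e.g. '2018234N12141', is illustrative only).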
+ # Load data globally
+ update_oni_data()
+ ibtracs = load_ibtracs_data()
+ convert_typhoondata(LOCAL_iBtrace_PATH, TYPHOON_DATA_PATH)
+ oni_data = pd.read_csv(ONI_DATA_PATH)
+ typhoon_data = pd.read_csv(TYPHOON_DATA_PATH, low_memory=False)
+ oni_long = process_oni_data(oni_data)
+ typhoon_max = process_typhoon_data(typhoon_data)
+ merged_data = merge_data(oni_long, typhoon_max)
+ # oni_data.csv is wide (Year, Jan..Dec) and has no 'Date' or 'ONI' column, so use
+ # the long-format series, indexed by date, for the '%Y-%b' lookups below.
+ oni_df = oni_long.set_index('Date')
+
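+ # All of this runs once at import time: the first start pays for the IBTrACS
+ # download and parse, later starts hit the pickle cache in load_ibtracs_data().
+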
+ # Main Analysis Function
+ def main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
+     # gr.Number delivers floats; coerce to int before range()/datetime().
+     start_year, start_month, end_year, end_month = int(start_year), int(start_month), int(end_year), int(end_month)
+     start_date = datetime(start_year, start_month, 1)
+     end_date = datetime(end_year, end_month, 28)
+     filtered_oni_df = oni_df[(oni_df.index >= start_date) & (oni_df.index <= end_date)]
+     filtered_data = merged_data[(merged_data['Year'] >= start_year) & (merged_data['Year'] <= end_year) &
+                                 (merged_data['Month'].astype(int) >= start_month) & (merged_data['Month'].astype(int) <= end_month)]
+
+     # Typhoon Tracks
+     fig_tracks = go.Figure()
+     regression_data = {'El Nino': {'longitudes': [], 'oni_values': [], 'names': []}, 'La Nina': {'longitudes': [], 'oni_values': [], 'names': []},
+                        'Neutral': {'longitudes': [], 'oni_values': [], 'names': []}, 'All': {'longitudes': [], 'oni_values': [], 'names': []}}
+     for year in range(start_year, end_year + 1):
+         season = ibtracs.get_season(year)
+         for storm_id in season.summary()['id']:
+             storm = get_storm_data(storm_id)
+             storm_dates = storm.time
+             if any(start_date <= date <= end_date for date in storm_dates):
+                 # Guard the lookup: a storm's first fix can fall outside the filtered ONI window.
+                 try:
+                     storm_oni = filtered_oni_df.loc[storm_dates[0].strftime('%Y-%b')]['ONI']
+                 except KeyError:
+                     continue
+                 if isinstance(storm_oni, pd.Series):
+                     storm_oni = storm_oni.iloc[0]
+                 phase = classify_enso_phases(storm_oni)
+                 regression_data[phase]['longitudes'].append(storm.lon[0])
+                 regression_data[phase]['oni_values'].append(storm_oni)
+                 regression_data[phase]['names'].append(f'{storm.name} ({year})')
+                 regression_data['All']['longitudes'].append(storm.lon[0])
+                 regression_data['All']['oni_values'].append(storm_oni)
+                 regression_data['All']['names'].append(f'{storm.name} ({year})')
+                 if (enso_phase == 'All Years' or (enso_phase == 'El Niño Years' and phase == 'El Nino') or
+                         (enso_phase == 'La Niña Years' and phase == 'La Nina') or (enso_phase == 'Neutral Years' and phase == 'Neutral')):
+                     color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}[phase]
+                     fig_tracks.add_trace(go.Scattergeo(lon=storm.lon, lat=storm.lat, mode='lines', name=storm.name,
+                                                        text=f'{storm.name} ({year})', hoverinfo='text', line=dict(width=2, color=color)))
+     fig_tracks.update_layout(title=f'Typhoon Tracks from {start_year}-{start_month} to {end_year}-{end_month}', geo=dict(projection_type='natural earth', showland=True))
+
+     # All Years Regression
+     all_years_fig = go.Figure()
+     df_all = pd.DataFrame({'Longitude': regression_data['All']['longitudes'], 'ONI': regression_data['All']['oni_values'], 'Name': regression_data['All']['names']})
+     if not df_all.empty and len(df_all) > 1:
+         all_years_fig = px.scatter(df_all, x='Longitude', y='ONI', hover_data=['Name'], title='All Years Typhoon Generation vs. ONI')
+         X = np.array(df_all['Longitude']).reshape(-1, 1)
+         y = df_all['ONI']
+         model = LinearRegression().fit(X, y)
+         y_pred = model.predict(X)
+         all_years_fig.add_trace(go.Scatter(x=df_all['Longitude'], y=y_pred, mode='lines', name='Regression Line'))
+
+     # Regression Graphs by Phase
+     regression_html = ""
+     slopes_html = ""
+     for phase in ['El Nino', 'La Nina', 'Neutral']:
+         df = pd.DataFrame({'Longitude': regression_data[phase]['longitudes'], 'ONI': regression_data[phase]['oni_values'], 'Name': regression_data[phase]['names']})
+         if not df.empty and len(df) > 1:
+             fig = px.scatter(df, x='Longitude', y='ONI', hover_data=['Name'], title=f'{phase} Typhoon Generation vs. ONI')
+             X = np.array(df['Longitude']).reshape(-1, 1)
+             y = df['ONI']
+             model = LinearRegression().fit(X, y)
+             y_pred = model.predict(X)
+             slope = model.coef_[0]
+             correlation_coef = np.corrcoef(df['Longitude'], df['ONI'])[0, 1]
+             fig.add_trace(go.Scatter(x=df['Longitude'], y=y_pred, mode='lines', name='Regression Line'))
+             regression_html += fig.to_html(include_plotlyjs=False)
+             slopes_html += f"<p>{phase} Regression Slope: {slope:.4f}, Correlation Coefficient: {correlation_coef:.4f}</p>"
+
+     # Wind and Pressure Scatter Plots
+     wind_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category', hover_data=['NAME', 'Year', 'Category'],
+                                   title='Wind Speed vs ONI', labels={'USA_WIND': 'Maximum Wind Speed (knots)'}, color_discrete_map=color_map)
+     pressure_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category', hover_data=['NAME', 'Year', 'Category'],
+                                       title='Pressure vs ONI', labels={'USA_PRES': 'Minimum Pressure (hPa)'}, color_discrete_map=color_map)
+     if typhoon_search:
+         for fig in [wind_oni_scatter, pressure_oni_scatter]:
+             mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
+             fig.add_trace(go.Scatter(x=filtered_data.loc[mask, 'ONI'], y=filtered_data.loc[mask, 'USA_WIND' if 'Wind' in fig.layout.title.text else 'USA_PRES'],
+                                      mode='markers', marker=dict(size=10, color='red', symbol='star'), name=f'Matched: {typhoon_search}'))
+
+     # Additional Metrics
+     max_wind_speed = filtered_data['USA_WIND'].max()
+     min_pressure = filtered_data['USA_PRES'].min()
+     typhoon_counts = filtered_data['ONI'].apply(classify_enso_phases).value_counts().to_dict()
+     month_counts = filtered_data.groupby([filtered_data['ONI'].apply(classify_enso_phases), filtered_data['ISO_TIME'].dt.month]).size().unstack(fill_value=0)
+     concentrated_months = month_counts.idxmax(axis=1).to_dict()
+     month_names = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+     count_analysis_html = "".join([f"<p>{phase}: {count} typhoons</p>" for phase, count in typhoon_counts.items()])
+     month_analysis_html = "".join([f"<p>{phase}: Most concentrated in {month_names[month-1]}</p>" for phase, month in concentrated_months.items()])
+
+     return (fig_tracks, all_years_fig, regression_html, slopes_html, wind_oni_scatter, pressure_oni_scatter,
+             "Logistic Regression Results: See Logistic Regression Tab", f"Maximum Wind Speed: {max_wind_speed:.2f} knots",
+             f"Minimum Pressure: {min_pressure:.2f} hPa", "Wind-ONI correlation: See Logistic Regression Tab",
+             "Pressure-ONI correlation: See Logistic Regression Tab", count_analysis_html, month_analysis_html)
+
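+ # The 13 values returned above line up positionally with the 13 output
+ # components wired to analyze_button.click() in the interface below.
+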
+ # Cluster Analysis Function
+ def cluster_analysis(n_clusters, show_clusters, show_routes, fourier_series, start_year, start_month, end_year, end_month, enso_phase):
+     # gr.Number delivers floats; coerce to int before range()/datetime()/KMeans.
+     n_clusters, start_year, start_month, end_year, end_month = int(n_clusters), int(start_year), int(start_month), int(end_year), int(end_month)
+     start_date = datetime(start_year, start_month, 1)
+     end_date = datetime(end_year, end_month, 28)
+     filtered_oni_df = oni_df[(oni_df.index >= start_date) & (oni_df.index <= end_date)]
+     fig_routes = go.Figure()
+     west_pacific_storms = []
+     for year in range(start_year, end_year + 1):
+         season = ibtracs.get_season(year)
+         for storm_id in season.summary()['id']:
+             storm = get_storm_data(storm_id)
+             storm_date = storm.time[0]
+             try:
+                 storm_oni = filtered_oni_df.loc[storm_date.strftime('%Y-%b')]['ONI']
+             except KeyError:
+                 continue  # genesis month falls outside the filtered ONI window
+             if isinstance(storm_oni, pd.Series):
+                 storm_oni = storm_oni.iloc[0]
+             storm_phase = classify_enso_phases(storm_oni)
+             if (enso_phase == 'All Years' or (enso_phase == 'El Niño Years' and storm_phase == 'El Nino') or
+                     (enso_phase == 'La Niña Years' and storm_phase == 'La Nina') or (enso_phase == 'Neutral Years' and storm_phase == 'Neutral')):
+                 lons, lats = filter_west_pacific_coordinates(np.array(storm.lon), np.array(storm.lat))
+                 if len(lons) > 1:
+                     west_pacific_storms.append((lons, lats))
+
+     max_length = max(len(storm[0]) for storm in west_pacific_storms)
+     standardized_routes = []
+     for lons, lats in west_pacific_storms:
+         if len(lons) < 2:
+             continue
+         t = np.linspace(0, 1, len(lons))
+         t_new = np.linspace(0, 1, max_length)
+         lon_interp = interp1d(t, lons, kind='linear')(t_new)
+         lat_interp = interp1d(t, lats, kind='linear')(t_new)
+         route_vector = np.column_stack((lon_interp, lat_interp)).flatten()
+         standardized_routes.append(route_vector)
+
+     kmeans = KMeans(n_clusters=n_clusters, random_state=42, n_init=10)
+     clusters = kmeans.fit_predict(standardized_routes)
+     cluster_counts = np.bincount(clusters)
+     equations_html = ""
+     if show_routes:
+         for lons, lats in west_pacific_storms:
+             fig_routes.add_trace(go.Scattergeo(lon=lons, lat=lats, mode='lines', line=dict(width=1, color='lightgray'), showlegend=False, hoverinfo='none'))
+     if show_clusters:
+         for i in range(n_clusters):
+             cluster_center = kmeans.cluster_centers_[i].reshape(-1, 2)
+             fig_routes.add_trace(go.Scattergeo(lon=cluster_center[:, 0], lat=cluster_center[:, 1], mode='lines', name=f'Cluster {i+1} (n={cluster_counts[i]})', line=dict(width=3)))
+             if fourier_series:
+                 X = cluster_center[:, 0]
+                 y = cluster_center[:, 1]
+                 x_min, x_max = X.min(), X.max()
+                 X_normalized = 2 * np.pi * (X - x_min) / (x_max - x_min)
+                 params, _ = curve_fit(lambda x, a0, a1, b1, a2, b2, a3, b3, a4, b4: a0 + a1*np.cos(x) + b1*np.sin(x) +
+                                       a2*np.cos(2*x) + b2*np.sin(2*x) + a3*np.cos(3*x) + b3*np.sin(3*x) + a4*np.cos(4*x) + b4*np.sin(4*x),
+                                       X_normalized, y)
+                 a0, a1, b1, a2, b2, a3, b3, a4, b4 = params
+                 equations_html += f"<h4>Cluster {i+1} (Typhoons: {cluster_counts[i]})</h4><p>Fourier Series: y = {a0:.4f} + {a1:.4f}*cos(x) + {b1:.4f}*sin(x) + " \
+                                   f"{a2:.4f}*cos(2x) + {b2:.4f}*sin(2x) + {a3:.4f}*cos(3x) + {b3:.4f}*sin(3x) + {a4:.4f}*cos(4x) + {b4:.4f}*sin(4x)</p>" \
+                                   f"<p>X Range: 0 to {2*np.pi:.4f}</p><p>Longitude Range: {x_min:.4f}°E to {x_max:.4f}°E</p><hr>"
+
+     fig_routes.update_layout(title=f'Typhoon Routes Clustering ({start_year}-{end_year}) - {enso_phase}', geo=dict(projection_type='mercator', showland=True,
+                              lataxis={'range': [0, 40]}, lonaxis={'range': [100, 180]}))
+     return fig_routes, equations_html
+
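+ # The Fourier fit rescales each cluster-mean track's longitudes onto [0, 2*pi]
+ # (x = 2*pi*(lon - lon_min)/(lon_max - lon_min)) and fits a 4th-order series
+ # mapping x to latitude; it assumes the mean track is single-valued in longitude.
+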
+ # Logistic Regression Functions
+ def logistic_regression(regression_type, start_year, start_month, end_year, end_month):
+     # gr.Number delivers floats; coerce to int before building datetimes.
+     start_year, start_month, end_year, end_month = int(start_year), int(start_month), int(end_year), int(end_month)
+     start_date = datetime(start_year, start_month, 1)
+     end_date = datetime(end_year, end_month, 28)
+     # .copy() avoids pandas SettingWithCopy warnings when adding indicator columns.
+     filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
+     if regression_type == 'Wind':
+         filtered_data['severe_typhoon'] = (filtered_data['USA_WIND'] >= 64).astype(int)
+         X = sm.add_constant(filtered_data['ONI'])
+         y = filtered_data['severe_typhoon']
+         model = sm.Logit(y, X).fit()
+         beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
+         el_nino_severe = filtered_data[filtered_data['ONI'] >= 0.5]['severe_typhoon'].mean()
+         la_nina_severe = filtered_data[filtered_data['ONI'] <= -0.5]['severe_typhoon'].mean()
+         neutral_severe = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]['severe_typhoon'].mean()
+         return f"<h3>Wind Speed Logistic Regression</h3><p>β1: {beta_1:.4f}</p><p>Odds Ratio: {exp_beta_1:.4f}</p><p>P-value: {p_value:.4f}</p>" \
+                f"<p>El Niño: {el_nino_severe:.2%}</p><p>La Niña: {la_nina_severe:.2%}</p><p>Neutral: {neutral_severe:.2%}</p>"
+     elif regression_type == 'Pressure':
+         filtered_data['intense_typhoon'] = (filtered_data['USA_PRES'] <= 950).astype(int)
+         X = sm.add_constant(filtered_data['ONI'])
+         y = filtered_data['intense_typhoon']
+         model = sm.Logit(y, X).fit()
+         beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
+         el_nino_intense = filtered_data[filtered_data['ONI'] >= 0.5]['intense_typhoon'].mean()
+         la_nina_intense = filtered_data[filtered_data['ONI'] <= -0.5]['intense_typhoon'].mean()
+         neutral_intense = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]['intense_typhoon'].mean()
+         return f"<h3>Pressure Logistic Regression</h3><p>β1: {beta_1:.4f}</p><p>Odds Ratio: {exp_beta_1:.4f}</p><p>P-value: {p_value:.4f}</p>" \
+                f"<p>El Niño: {el_nino_intense:.2%}</p><p>La Niña: {la_nina_intense:.2%}</p><p>Neutral: {neutral_intense:.2%}</p>"
+     elif regression_type == 'Longitude':
+         filtered_data = filtered_data.dropna(subset=['LON'])
+         filtered_data['western_typhoon'] = (filtered_data['LON'] <= 140).astype(int)
+         X = sm.add_constant(filtered_data['ONI'])
+         y = filtered_data['western_typhoon']
+         model = sm.Logit(y, X).fit()
+         beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
+         el_nino_western = filtered_data[filtered_data['ONI'] >= 0.5]['western_typhoon'].mean()
+         la_nina_western = filtered_data[filtered_data['ONI'] <= -0.5]['western_typhoon'].mean()
+         neutral_western = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]['western_typhoon'].mean()
+         return f"<h3>Longitude Logistic Regression</h3><p>β1: {beta_1:.4f}</p><p>Odds Ratio: {exp_beta_1:.4f}</p><p>P-value: {p_value:.4f}</p>" \
+                f"<p>El Niño: {el_nino_western:.2%}</p><p>La Niña: {la_nina_western:.2%}</p><p>Neutral: {neutral_western:.2%}</p>"
+
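+ # Reading the output: exp(β1) is the odds ratio, i.e. the factor by which the odds
+ # of the event (severe / intense / westward-forming) change per +1.0 rise in ONI;
+ # the three percentages are the raw event rates within each ENSO phase.
+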
+ # Typhoon Path Animation Function
+ def typhoon_path_animation(year, typhoon, standard):
+     # The typhoon dropdown can deliver either a bare storm id or a "Name (ID)" label; accept both.
+     if isinstance(typhoon, str) and '(' in typhoon:
+         typhoon = typhoon.split('(')[-1].strip(')')
+     storm = ibtracs.get_storm(typhoon)
+     fig = go.Figure()
+     fig.add_trace(go.Scattergeo(lon=storm.lon, lat=storm.lat, mode='lines', line=dict(width=2, color='gray'), name='Path', showlegend=False))
+     fig.add_trace(go.Scattergeo(lon=[storm.lon[0]], lat=[storm.lat[0]], mode='markers', marker=dict(size=10, color='green', symbol='star'),
+                                 name='Starting Point', text=storm.time[0].strftime('%Y-%m-%d %H:%M'), hoverinfo='text+name'))
+     frames = []
+     for i in range(len(storm.time)):
+         category, color = categorize_typhoon_by_standard(storm.vmax[i], standard)
+         frame_data = [
+             go.Scattergeo(lon=storm.lon[:i+1], lat=storm.lat[:i+1], mode='lines', line=dict(width=2, color='blue'), name='Path Traveled', showlegend=False),
+             go.Scattergeo(lon=[storm.lon[i]], lat=[storm.lat[i]], mode='markers+text', marker=dict(size=10, color=color, symbol='star'),
+                           text=category, textposition="top center", name='Current Location', hovertext=f"{storm.time[i].strftime('%Y-%m-%d %H:%M')}<br>Category: {category}<br>Wind Speed: {storm.vmax[i]:.1f} kt")
+         ]
+         frames.append(go.Frame(data=frame_data, name=f"frame{i}"))
+     fig.frames = frames
+     fig.update_layout(title=f"{year} {storm.name} Typhoon Path", geo=dict(projection_type='natural earth', showland=True),
+                       updatemenus=[{"buttons": [{"args": [None, {"frame": {"duration": 100, "redraw": True}, "fromcurrent": True, "transition": {"duration": 0}}], "label": "Play", "method": "animate"},
+                                                 {"args": [[None], {"frame": {"duration": 0, "redraw": True}, "mode": "immediate", "transition": {"duration": 0}}], "label": "Pause", "method": "animate"}],
+                                     "direction": "left", "pad": {"r": 10, "t": 87}, "showactive": False, "type": "buttons", "x": 0.1, "xanchor": "right", "y": 0, "yanchor": "top"}],
+                       sliders=[{"steps": [{"args": [[f"frame{k}"], {"frame": {"duration": 100, "redraw": True}, "mode": "immediate", "transition": {"duration": 0}}],
+                                            "label": storm.time[k].strftime('%Y-%m-%d %H:%M'), "method": "animate"} for k in range(len(storm.time))]}])
+     return fig
+
+ def categorize_typhoon_by_standard(wind_speed, standard):
      if standard == 'taiwan':
          wind_speed_ms = wind_speed * 0.514444
          if wind_speed_ms >= 51.0:
              return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['color']
          elif wind_speed_ms >= 33.7:
              return 'Medium Typhoon', taiwan_standard['Medium Typhoon']['color']
          elif wind_speed_ms >= 17.2:
              return 'Mild Typhoon', taiwan_standard['Mild Typhoon']['color']
          else:
              return 'Tropical Depression', taiwan_standard['Tropical Depression']['color']
      else:
          if wind_speed >= 137:
              return 'C5 Super Typhoon', atlantic_standard['C5 Super Typhoon']['color']
          elif wind_speed >= 113:
              return 'C4 Very Strong Typhoon', atlantic_standard['C4 Very Strong Typhoon']['color']
          elif wind_speed >= 96:
              return 'C3 Strong Typhoon', atlantic_standard['C3 Strong Typhoon']['color']
          elif wind_speed >= 83:
              return 'C2 Typhoon', atlantic_standard['C2 Typhoon']['color']
          elif wind_speed >= 64:
              return 'C1 Typhoon', atlantic_standard['C1 Typhoon']['color']
          elif wind_speed >= 34:
              return 'Tropical Storm', atlantic_standard['Tropical Storm']['color']
          else:
              return 'Tropical Depression', atlantic_standard['Tropical Depression']['color']
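+ # Units: wind_speed arrives in knots. The Taiwan branch converts to m/s
+ # (1 kt = 0.514444 m/s), so 100 kt -> 51.4 m/s -> 'Strong Typhoon', while the
+ # Atlantic branch compares knots directly (100 kt -> 'C3 Strong Typhoon').
+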
+ # Update Typhoon Dropdown
+ def update_typhoon_dropdown(selected_year):
+     season = ibtracs.get_season(int(selected_year))  # the year dropdown delivers strings
+     storm_summary = season.summary()
+     options = [f"{storm_summary['name'][i]} ({storm_summary['id'][i]})" for i in range(storm_summary['season_storms'])]
+     # Use the "Name (ID)" labels as both choices and default value so the selection
+     # is always a member of choices; typhoon_path_animation extracts the id itself.
+     return gr.Dropdown.update(choices=options, value=options[0] if options else None)
+
+ # Gradio Interface
476
+ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
477
+ gr.Markdown("# Typhoon Analysis Dashboard")
478
+
479
+ with gr.Tab("Main Analysis"):
480
+ with gr.Row():
481
+ start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
482
+ start_month = gr.Number(label="Start Month", value=1, minimum=1, maximum=12, step=1)
483
+ end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
484
+ end_month = gr.Number(label="End Month", value=6, minimum=1, maximum=12, step=1)
485
+ enso_dropdown = gr.Dropdown(label="ENSO Phase", choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"], value="All Years")
486
+ typhoon_search = gr.Textbox(label="Search Typhoon Name")
487
+ analyze_button = gr.Button("Analyze")
488
+ typhoon_tracks = gr.Plot(label="Typhoon Tracks")
489
+ all_years_regression = gr.Plot(label="All Years Regression")
490
+ regression_graphs = gr.HTML(label="Regression Graphs by ENSO Phase")
491
+ slopes = gr.HTML(label="Slopes")
492
+ wind_oni_scatter = gr.Plot(label="Wind Speed vs ONI")
493
+ pressure_oni_scatter = gr.Plot(label="Pressure vs ONI")
494
+ correlation_text = gr.HTML(label="Correlation Coefficient")
495
+ max_wind_speed_text = gr.HTML(label="Max Wind Speed")
496
+ min_pressure_text = gr.HTML(label="Min Pressure")
497
+ wind_oni_correlation = gr.HTML(label="Wind-ONI Correlation")
498
+ pressure_oni_correlation = gr.HTML(label="Pressure-ONI Correlation")
499
+ count_analysis = gr.HTML(label="Typhoon Count Analysis")
500
+ month_analysis = gr.HTML(label="Concentrated Months Analysis")
501
+ analyze_button.click(main_analysis, inputs=[start_year, start_month, end_year, end_month, enso_dropdown, typhoon_search],
502
+ outputs=[typhoon_tracks, all_years_regression, regression_graphs, slopes, wind_oni_scatter, pressure_oni_scatter,
503
+ correlation_text, max_wind_speed_text, min_pressure_text, wind_oni_correlation, pressure_oni_correlation,
504
+ count_analysis, month_analysis])
505
+
506
+ with gr.Tab("Cluster Analysis"):
507
+ n_clusters = gr.Number(label="Number of Clusters", value=5, minimum=1, maximum=20, step=1)
508
+ show_clusters = gr.Checkbox(label="Show Clusters")
509
+ show_routes = gr.Checkbox(label="Show Typhoon Routes")
510
+ fourier_series = gr.Checkbox(label="Fourier Series")
511
+ with gr.Row():
512
+ cluster_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
513
+ cluster_start_month = gr.Number(label="Start Month", value=1, minimum=1, maximum=12, step=1)
514
+ cluster_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
515
+ cluster_end_month = gr.Number(label="End Month", value=6, minimum=1, maximum=12, step=1)
516
+ cluster_enso = gr.Dropdown(label="ENSO Phase", choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"], value="All Years")
517
+ cluster_button = gr.Button("Generate Cluster Analysis")
518
+ cluster_figure = gr.Plot(label="Cluster Routes")
519
+ equations_output = gr.HTML(label="Cluster Equations")
520
+ cluster_button.click(cluster_analysis, inputs=[n_clusters, show_clusters, show_routes, fourier_series, cluster_start_year, cluster_start_month, cluster_end_year, cluster_end_month, cluster_enso],
521
+ outputs=[cluster_figure, equations_output])
522
+
523
+ with gr.Tab("Logistic Regression"):
524
+ regression_type = gr.Dropdown(label="Regression Type", choices=["Wind", "Pressure", "Longitude"], value="Wind")
525
+ with gr.Row():
526
+ reg_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
527
+ reg_start_month = gr.Number(label="Start Month", value=1, minimum=1, maximum=12, step=1)
528
+ reg_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
529
+ reg_end_month = gr.Number(label="End Month", value=6, minimum=1, maximum=12, step=1)
530
+ regression_button = gr.Button("Run Regression")
531
+ regression_results = gr.HTML(label="Regression Results")
532
+ regression_button.click(logistic_regression, inputs=[regression_type, reg_start_year, reg_start_month, reg_end_year, reg_end_month], outputs=[regression_results])
533
+
534
+ with gr.Tab("Typhoon Path Animation"):
535
+ year_dropdown = gr.Dropdown(label="Year", choices=[str(year) for year in range(1950, 2025)], value="2024")
536
+ typhoon_dropdown = gr.Dropdown(label="Typhoon", choices=[])
537
+ standard_dropdown = gr.Dropdown(label="Classification Standard", choices=["Atlantic", "Taiwan"], value="Atlantic")
538
+ animation_button = gr.Button("Generate Animation")
539
+ animation_figure = gr.Plot(label="Typhoon Path Animation")
540
+ year_dropdown.change(update_typhoon_dropdown, inputs=[year_dropdown], outputs=[typhoon_dropdown])
541
+ animation_button.click(typhoon_path_animation, inputs=[year_dropdown, typhoon_dropdown, standard_dropdown], outputs=[animation_figure])
542
+
543
+ demo.launch()