euler314 committed
Commit 07d18a8 · verified · 1 parent: a6945e3

Update app.py

Files changed (1): app.py +5 -6
app.py CHANGED
@@ -36,7 +36,7 @@ TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
 LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.ALL.list.v04r01.csv')
 iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.ALL.list.v04r01.csv'
 CACHE_FILE = 'ibtracs_cache.pkl'
-CACHE_EXPIRY_DAYS = 1
+CACHE_EXPIRY_DAYS = 0 # Force refresh for testing
 
 # Color maps for Plotly (RGB)
 color_map = {
@@ -130,11 +130,9 @@ def load_ibtracs_data():
         with open(CACHE_FILE, 'rb') as f:
             return pickle.load(f)
 
-    # Define the path for the all basins file
     all_basins_path = os.path.join(DATA_PATH, 'ibtracs.ALL.list.v04r01.csv')
 
     try:
-        # Try to load all basins file first
         if os.path.exists(all_basins_path):
             print("Loading ALL basins file...")
             ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=all_basins_path)
@@ -145,10 +143,9 @@ def load_ibtracs_data():
             with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
                 temp_file.write(response.text)
             shutil.move(temp_file.name, all_basins_path)
-            # Load the full ALL dataset
+            print(f"Downloaded {all_basins_path}. Verify it contains all basins.")
             ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=all_basins_path)
 
-        # Save to cache
         with open(CACHE_FILE, 'wb') as f:
             pickle.dump(ibtracs, f)
 
@@ -156,7 +153,6 @@ def load_ibtracs_data():
 
     except Exception as e:
         print(f"Error loading IBTrACS data: {e}")
-        # As a fallback, try loading just the default data that comes with tropycal
         print("Attempting to load default dataset...")
        ibtracs = tracks.TrackDataset(basin='all')
         with open(CACHE_FILE, 'wb') as f:
@@ -208,6 +204,9 @@ def process_typhoon_data(typhoon_data):
     typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
     typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
     typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
+    # Debug: Check unique basins
+    unique_basins = typhoon_data['SID'].str[:2].unique()
+    print(f"Unique basins in typhoon_data: {unique_basins}")
     typhoon_max = typhoon_data.groupby('SID').agg({
         'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first',
         'LAT': 'first', 'LON': 'first'
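Note on the first hunk: the commit lowers CACHE_EXPIRY_DAYS, but the staleness check that consumes the constant lies outside the hunks shown. A minimal sketch of what such a check typically looks like, assuming the cache age is derived from the pickle file's modification time; cache_is_fresh is a hypothetical helper, not code from this commit:

import os
import time

CACHE_FILE = 'ibtracs_cache.pkl'
CACHE_EXPIRY_DAYS = 0  # assumed to gate reuse of ibtracs_cache.pkl

def cache_is_fresh(path=CACHE_FILE, max_age_days=CACHE_EXPIRY_DAYS):
    # Treat the cache as fresh only if the file exists and is
    # younger than max_age_days, measured from its mtime.
    if not os.path.exists(path):
        return False
    age_seconds = time.time() - os.path.getmtime(path)
    return age_seconds < max_age_days * 86400

Under this reading, CACHE_EXPIRY_DAYS = 0 makes any existing cache register as stale, so load_ibtracs_data() falls through to the download path on every run, consistent with the "Force refresh for testing" comment.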
 
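A caveat on the new debug lines in process_typhoon_data: in IBTrACS v04 the SID column is a storm serial number such as 2018300N26315 (season year, day of year, genesis latitude and longitude), so SID.str[:2] yields year digits like '19' and '20' rather than basin codes. If the processed CSV still carries the standard IBTrACS BASIN column, a check along these lines would list actual basins; the column surviving preprocessing is an assumption, not something this commit shows:

# Assumes the processed data kept the IBTrACS BASIN column,
# whose values are two-letter basin codes such as
# 'NA', 'EP', 'WP', 'NI', 'SI', 'SP', 'SA'.
unique_basins = typhoon_data['BASIN'].dropna().unique()
print(f"Unique basins in typhoon_data: {unique_basins}")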