euler314 committed on
Commit
1c62817
·
verified ·
1 Parent(s): e6b6548

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -7
app.py CHANGED
@@ -33,7 +33,7 @@ DATA_PATH = args.data_path
33
 
34
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
35
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
36
- LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
37
  iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.ALL.list.v04r01.csv'
38
  CACHE_FILE = 'ibtracs_cache.pkl'
39
  CACHE_EXPIRY_DAYS = 1
@@ -129,19 +129,37 @@ def load_ibtracs_data():
129
  if os.path.exists(CACHE_FILE) and (datetime.now() - datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))).days < CACHE_EXPIRY_DAYS:
130
  with open(CACHE_FILE, 'rb') as f:
131
  return pickle.load(f)
132
- if os.path.exists(LOCAL_iBtrace_PATH):
133
- # Remove the basin='west_pacific' parameter to load all basins
134
- ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
 
 
 
 
 
135
  else:
 
136
  response = requests.get(iBtrace_uri)
137
  response.raise_for_status()
138
  with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
139
  temp_file.write(response.text)
140
- shutil.move(temp_file.name, LOCAL_iBtrace_PATH)
141
- # Remove the basin='west_pacific' parameter here as well
142
- ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
 
 
 
 
 
 
 
 
 
 
 
143
  with open(CACHE_FILE, 'wb') as f:
144
  pickle.dump(ibtracs, f)
 
145
  return ibtracs
146
 
147
  def convert_typhoondata(input_file, output_file):
 
33
 
34
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
35
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
36
+ LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.ALL.list.v04r01.csv')
37
  iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.ALL.list.v04r01.csv'
38
  CACHE_FILE = 'ibtracs_cache.pkl'
39
  CACHE_EXPIRY_DAYS = 1
 
129
  if os.path.exists(CACHE_FILE) and (datetime.now() - datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))).days < CACHE_EXPIRY_DAYS:
130
  with open(CACHE_FILE, 'rb') as f:
131
  return pickle.load(f)
132
+
133
+ # Define both paths we might need
134
+ all_basins_path = os.path.join(DATA_PATH, 'ibtracs.ALL.list.v04r01.csv')
135
+
136
+ # Try to load all basins file first
137
+ if os.path.exists(all_basins_path):
138
+ print("Loading ALL basins file...")
139
+ ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=all_basins_path)
140
  else:
141
+ print("Downloading ALL basins file...")
142
  response = requests.get(iBtrace_uri)
143
  response.raise_for_status()
144
  with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
145
  temp_file.write(response.text)
146
+ shutil.move(temp_file.name, all_basins_path)
147
+ # Make sure we load the full ALL dataset
148
+ ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=all_basins_path)
149
+
150
+ # Check if we have all basins
151
+ available_basins = set()
152
+ for storm_id in ibtracs.keys():
153
+ if len(storm_id) >= 2:
154
+ basin_code = storm_id[:2] # First two characters are usually basin code
155
+ available_basins.add(basin_code)
156
+
157
+ print(f"Available basin codes: {available_basins}")
158
+
159
+ # Save to cache
160
  with open(CACHE_FILE, 'wb') as f:
161
  pickle.dump(ibtracs, f)
162
+
163
  return ibtracs
164
 
165
  def convert_typhoondata(input_file, output_file):