# %%
import os

import h5py
import pandas as pd
from tqdm import tqdm


# %%
h5_dirs = ["./quakeflow_nc/waveform_h5", "./quakeflow_sc/waveform_h5"]
h5_out = "waveform.h5"
h5_train = "waveform_train.h5"
h5_test = "waveform_test.h5"

# # %%
# h5_dir = "waveform_h5"
# h5_out = "waveform.h5"
# h5_train = "waveform_train.h5"
# h5_test = "waveform_test.h5"

# list the HDF5 files in each directory; the last file per region is held out as the test split
h5_file_lists = [sorted(f for f in os.listdir(h5_dir) if f.endswith(".h5")) for h5_dir in h5_dirs]
train_file_lists = [x[:-1] for x in h5_file_lists]
test_file_lists = [x[-1:] for x in h5_file_lists]
print(f"train files: {train_file_lists}")
print(f"test files: {test_file_lists}")

# %%
def link_events(out_path, dir_file_lists):
    """Write an HDF5 file whose top-level entries are external links to events in the source files."""
    with h5py.File(out_path, "w") as fp:
        for h5_dir, h5_files in dir_file_lists:
            for h5_file in h5_files:
                h5_path = os.path.join(h5_dir, h5_file)
                with h5py.File(h5_path, "r") as f:
                    for event in tqdm(f.keys(), desc=h5_file, total=len(f.keys())):
                        if event not in fp:
                            fp[event] = h5py.ExternalLink(h5_path, event)
                        else:
                            print(f"{event} already exists")


# link every event from both regions into a single file
link_events(h5_out, zip(h5_dirs, h5_file_lists))

# %%
# train split: all but the last file from each region
link_events(h5_train, zip(h5_dirs, train_file_lists))

# %%
# test split: the last file from each region
link_events(h5_test, zip(h5_dirs, test_file_lists))
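
# %%
# Optional sanity check (a minimal sketch, not part of the original pipeline): open the merged
# file read-only and resolve a few external links to confirm they point at readable event groups.
with h5py.File(h5_out, "r") as fp:
    event_ids = list(fp.keys())
    print(f"{h5_out}: {len(event_ids)} linked events")
    for event in event_ids[:3]:
        grp = fp[event]  # accessing the link opens the source file lazily
        print(f"{event}: {len(grp.keys())} items")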

# %%
# merge the per-region event and pick catalogs into combined CSV files
dirs = ["./quakeflow_nc", "./quakeflow_sc"]
csv_files = ["events.csv", "events_test.csv", "events_train.csv", "picks.csv", "picks_test.csv", "picks_train.csv"]

for csv_file in csv_files:
    dfs = [pd.read_csv(f"{d}/{csv_file}") for d in dirs]
    df = pd.concat(dfs, ignore_index=True)
    df.to_csv(csv_file, index=False)
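
# %%
# Quick consistency report (a sketch, assuming the merged CSVs and HDF5 files were written by the
# cells above): compare catalog row counts against the number of linked events in each split.
for split, h5_path, csv_path in [
    ("all", h5_out, "events.csv"),
    ("train", h5_train, "events_train.csv"),
    ("test", h5_test, "events_test.csv"),
]:
    n_csv = len(pd.read_csv(csv_path))
    with h5py.File(h5_path, "r") as fp:
        n_h5 = len(fp.keys())
    print(f"{split}: {n_csv} catalog rows, {n_h5} linked events")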