#!/usr/bin/env python3
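"""Re-normalize .npz arrays from one task's statistics to another's, in place.

Each array is de-normalized with the source task's mean/std and re-normalized
with the destination task's mean/std; the destination task is taken from the
name of ``data_dir``'s parent directory. Files are overwritten where they sit.

Illustrative invocation (the script name and paths are placeholders, not taken
from the original repository):

    python renormalize_stats.py /path/to/<dest_task>/npz_dir \
        --statistics_file stats.npz --source_task <source_task> --n_workers 8
"""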
from argparse import ArgumentParser, Namespace
from functools import partial
from multiprocessing import Pool
from pathlib import Path

import numpy as np
from tqdm import tqdm

def do_one(file_path: Path, src_u: np.ndarray, src_std: np.ndarray, dest_u: np.ndarray, dest_std: np.ndarray) -> None:
    """Convert one array from the source task's normalization to the destination's, overwriting the file."""
    x = np.load(file_path)["arr_0"]
    # Undo the source normalization, then apply the destination normalization.
    y = (x - src_u) / src_std * dest_std + dest_u
    y = y.astype(x.dtype)
    # Overwrite in place; the positional argument is stored under "arr_0" again.
    np.savez_compressed(file_path, y)

def get_args() -> Namespace:
    parser = ArgumentParser()
    parser.add_argument("data_dir", type=Path, help="Directory of .npz files; its parent directory name is the destination task.")
    parser.add_argument("--statistics_file", type=Path, required=True, help="Pickled dict of per-task statistics saved with np.savez.")
    parser.add_argument("--source_task", required=True, help="Task whose statistics the arrays are currently normalized with.")
    parser.add_argument("--n_workers", type=int, default=1, help="Number of worker processes.")
    return parser.parse_args()

def main(args: Namespace) -> None:
    files = [x for x in args.data_dir.iterdir() if x.suffix == ".npz"]
    assert len(files) > 0, f"no .npz files found in {args.data_dir}"

    # The statistics file holds a dict keyed by task name; indices 2 and 3 of each
    # entry are the mean and std, stored as torch tensors (hence .numpy()).
    stats = np.load(args.statistics_file, allow_pickle=True)["arr_0"].item()
    assert args.source_task in stats, (stats.keys(), args.source_task)
    src_u, src_std = stats[args.source_task][2].numpy(), stats[args.source_task][3].numpy()

    # The destination task is inferred from data_dir's parent directory name.
    dest_task = args.data_dir.parent.name
    assert dest_task in stats, (stats.keys(), dest_task)
    dest_u, dest_std = stats[dest_task][2].numpy(), stats[dest_task][3].numpy()
    assert src_u.shape == dest_u.shape and src_std.shape == dest_std.shape, (src_u, dest_u, src_std, dest_std)

    fn = partial(do_one, src_u=src_u, src_std=src_std, dest_u=dest_u, dest_std=dest_std)
    if args.n_workers > 1:
        with Pool(args.n_workers) as pool:  # close the pool cleanly instead of leaking it
            for _ in tqdm(pool.imap(fn, files), total=len(files)):
                pass
    else:
        for _ in tqdm(map(fn, files), total=len(files)):
            pass

if __name__ == "__main__":
    main(get_args())