File size: 1,469 Bytes
a0268ef
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import os
import json
import tqdm
import argparse
import functools
import multiprocessing


def process_file(filename, margin_data=None):
    """Annotate every JSONL record in *filename* with a ``margin_score``, in place.

    Each line of the file must be a JSON object with an ``"id"`` key. The score
    is looked up in *margin_data* by that id; ids absent from the mapping get a
    sentinel score of -1000. The rewrite is atomic-ish: output goes to a
    ``.tmp`` sibling which then replaces the original via ``os.rename``.

    Args:
        filename: Path to a JSONL file, rewritten in place.
        margin_data: Mapping of example id -> margin score. ``None`` is treated
            as an empty mapping (every record gets the -1000 sentinel).
    """
    if margin_data is None:
        # Original code would crash on .get() with the default; treat as empty.
        margin_data = {}

    tmp_path = filename + ".tmp"
    # Context managers guarantee both handles close even if a line fails to
    # parse; explicit UTF-8 because ensure_ascii=False emits raw non-ASCII.
    with open(filename, "r", encoding="utf-8") as in_file, \
            open(tmp_path, "w", encoding="utf-8") as out_file:
        for raw_line in in_file:
            record = json.loads(raw_line)
            record["margin_score"] = margin_data.get(record["id"], -1000)
            out_file.write(json.dumps(record, ensure_ascii=False) + "\n")

    os.rename(tmp_path, filename)
        

def main(args):
    """Compute per-id margin scores and stamp them into every data file.

    Loads two JSON mappings (id -> score), builds the difference
    positive - negative per id, then rewrites each valid/train JSONL file
    in parallel via a worker pool, with a progress bar over completions.
    """
    with open(args.positive_file) as pos_handle:
        positive_scores = json.load(pos_handle)

    with open(args.negative_file) as neg_handle:
        negative_scores = json.load(neg_handle)

    # Margin is defined over the ids present in the positive mapping.
    margin_scores = {}
    for example_id in positive_scores.keys():
        margin_scores[example_id] = positive_scores[example_id] - negative_scores[example_id]

    worker = functools.partial(process_file, margin_data=margin_scores)
    all_filenames = args.valid_filenames + args.train_filenames
    with multiprocessing.Pool(args.num_workers) as pool:
        completions = pool.imap_unordered(worker, all_filenames)
        # Drain the iterator just to drive the progress bar; results are None.
        for _ in tqdm.tqdm(completions, total=len(all_filenames)):
            pass


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Add margin scores (positive - negative) to JSONL data files in place."
    )
    # required=True: without it a missing path surfaces later as open(None) TypeError.
    parser.add_argument("--positive_file", required=True,
                        help="JSON file mapping example id -> positive score.")
    parser.add_argument("--negative_file", required=True,
                        help="JSON file mapping example id -> negative score.")
    # default=[]: nargs='+' defaults to None, which breaks list concatenation in main().
    parser.add_argument("--valid_filenames", nargs="+", default=[],
                        help="Validation JSONL files to annotate.")
    parser.add_argument("--train_filenames", nargs="+", default=[],
                        help="Training JSONL files to annotate.")
    parser.add_argument("--num_workers", type=int, default=os.cpu_count(),
                        help="Number of worker processes (default: all CPUs).")
    args = parser.parse_args()
    main(args)