Muennighoff committed
Commit d492aea · Parent(s): 5403bbb

Create multi_threaded_processed.py

Files changed (1): multi_threaded_processed.py (+171 -0)
multi_threaded_processed.py ADDED
@@ -0,0 +1,171 @@
from concurrent.futures import ThreadPoolExecutor
import os
import random
import subprocess
import timeit

import datasets

"""Example
git init
git remote add origin https://github.com/huggingface/evaluate.git
git fetch --depth 2 origin 9b056cdd5eb95459ae80142014865263e7dd75b8
# Get file after change
git checkout FETCH_HEAD -- README.md
# Get file before change
git checkout FETCH_HEAD^ -- README.md
"""

# In the multiprocessing case, the below leads to each process creating the same directory
# random = random.Random(42)  # make it reproducible

NUM_THREADS = 16
NUM_PROC = 64
DEBUG_SIZE = 4096

CWD = os.getcwd()

# Shell utils
def run_in_shell(cmd: str, cwd=None, timeout=60):
    # With shell=True, the single-element list is handed to the shell as one command string
    return subprocess.run([cmd], capture_output=True, shell=True, cwd=cwd, timeout=timeout)

def get_file_contents(commit, old_file, new_file, repo, cwd=None):
    completed = run_in_shell("git init", cwd=cwd)
    completed = run_in_shell("git remote add origin " + repo, cwd=cwd)
    completed = run_in_shell("git fetch --depth 2 origin " + commit, cwd=cwd)
    # If it requires authentication
    if completed.returncode != 0:
        return ("", "")
    # Optionally do git diff at the same time (saving code needs to be added)
    # git_diff = run_in_shell(f"git diff {commit}^ {commit}", cwd=cwd).stdout.decode(errors='ignore')
    completed = run_in_shell("git checkout FETCH_HEAD -- " + new_file, cwd=cwd)
    new_contents = run_in_shell("cat " + new_file, cwd=cwd).stdout.decode(errors='ignore')
    completed = run_in_shell("git checkout FETCH_HEAD^ -- " + old_file, cwd=cwd)
    # If there's only a new file, but no old file
    if completed.returncode != 0:
        return (new_contents, "")
    old_contents = run_in_shell("cat " + old_file, cwd=cwd).stdout.decode(errors='ignore')
    return (new_contents, old_contents)

def get_diff(ex):
    commit_id = ex["commit"]
    repos = list(set(ex["repos"].split(",")))
    old_file = ex["old_file"]
    new_file = ex["new_file"]
    for i, repo in enumerate(repos):
        # Dummy credentials so git fails fast instead of prompting for auth
        repo = "https://xxx:xxx@github.com/" + repo + ".git"
        # Create a random directory to store the repo
        random_dir = CWD + "/" + str(random.randint(0, 1000000))
        # Can take very long when running many processes
        run_in_shell("mkdir " + random_dir, timeout=300)
        try:
            new_contents, old_contents = get_file_contents(commit_id, old_file, new_file, repo, cwd=random_dir)
        except Exception:
            # Break in case of many repos that all lead us nowhere
            if i > 10:
                break
            continue
        finally:
            run_in_shell("rm -rf " + random_dir)  # clean up again
        ex["new_contents"] = new_contents
        ex["old_contents"] = old_contents
        return ex
    # If no repo worked
    ex["new_contents"] = ""
    ex["old_contents"] = ""
    return ex

def get_diff_multi_threaded_processed(batch):
    with ThreadPoolExecutor(max_workers=NUM_THREADS) as executor:
        # Convert dict of lists to list of dicts, then map to threads
        results = list(executor.map(get_diff, [dict(zip(batch, t)) for t in zip(*batch.values())]))
    # Convert list of dicts to dict of lists
    return {k: [dic[k] for dic in results] for k in results[0]}

if __name__ == "__main__":
    # git clone bigcode/github-commits
    ds = datasets.load_dataset("./github-commits", use_auth_token=True)["train"]

    ### OPTIONAL FILTERING ###
    """
    java = [".java"]
    javascript = [
        ".js",
        "._js",
        ".bones",
        ".es6",
        ".jake",
        ".jsb",
        ".jscad",
        ".jsfl",
        ".jsm",
        ".jss",
        ".njs",
        ".pac",
        ".sjs",
        ".ssjs",
        ".xsjs",
        ".xsjslib"
    ]
    python = [
        ".py",
        ".bzl",
        ".gyp",
        ".lmi",
        ".pyde",
        ".pyp",
        ".pyt",
        ".pyw",
        ".tac",
        ".wsgi",
        ".xpy"
    ]

    import json
    with open("programming-languages.json", "r") as f:
        extensions = json.load(f)
    suffices = [suffix for suffices in extensions.values() for suffix in suffices]
    def filter_extension(ex):
        splits = ex["new_file"].split(".")
        if len(splits) == 1: return False
        return "." + splits[-1] in suffices
    def filter_extension_python(ex):
        splits = ex["new_file"].split(".")
        if len(splits) == 1: return False
        return "." + splits[-1] in python
    def filter_update(ex):
        return ex["message"] != "Update " + ex["old_file"]

    filter_msg = ["initial commit", "please\n", "please"]

    def filter_misc(ex):
        return ex["message"] not in filter_msg

    # Removes ~10M
    ds = ds.filter(filter_extension, num_proc=NUM_PROC)
    print("After Extension filter", len(ds))
    # Removes ~1M
    ds = ds.filter(filter_update, num_proc=NUM_PROC)
    print("After Update filter", len(ds))
    ds = ds.filter(filter_extension_python, num_proc=NUM_PROC)
    print("After Python filter", len(ds))
    ds = ds.filter(filter_misc, num_proc=NUM_PROC)
    print("After Misc filter", len(ds))
    ds = ds.select(range(DEBUG_SIZE))
    """
    ### END FILTERING ###

    def run_multi_processing_threading():
        ds.map(get_diff_multi_threaded_processed, num_proc=NUM_PROC, batch_size=NUM_THREADS, batched=True).to_json("mpt.jsonl")

    NUM_TRIALS = 1
    print(f"Timing multithreading + multiprocessing using {NUM_THREADS} threads and {NUM_PROC} processes")
    time = timeit.timeit(stmt=run_multi_processing_threading, number=NUM_TRIALS)
    print("Time:", time)
    with open("mpt.txt", "w") as f:
        f.write(str(time))

    # run_multi_processing_threading()
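
For reference, a minimal sketch of how a single row flows through get_diff and how get_diff_multi_threaded_processed transposes a batch. The column names (commit, repos, old_file, new_file) mirror what the script reads above; the concrete values here are hypothetical placeholders:

# Hypothetical example row, matching the columns accessed in get_diff
ex = {
    "commit": "9b056cdd5eb95459ae80142014865263e7dd75b8",  # placeholder commit hash
    "repos": "huggingface/evaluate",  # comma-separated list of candidate repos
    "old_file": "README.md",
    "new_file": "README.md",
}
# get_diff(ex) shallow-fetches the commit and fills in "new_contents"/"old_contents"

# datasets passes the batched map a dict of lists; the transpose works like this
# (reduced to two columns for brevity):
batch = {"commit": ["c1", "c2"], "repos": ["r1", "r2"]}
rows = [dict(zip(batch, t)) for t in zip(*batch.values())]
# rows == [{"commit": "c1", "repos": "r1"}, {"commit": "c2", "repos": "r2"}]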