Muennighoff committed
Commit 10de591 · Parent: f0abe0a

Update multi_threaded_processed.py

Files changed (1):
  1. multi_threaded_processed.py (+30, -19)
multi_threaded_processed.py CHANGED
@@ -36,7 +36,7 @@ def get_file_contents(commit, old_file, new_file, repo, cwd=None):
     # If it requires authentication
     if completed.returncode != 0:
         #print("ERRORC1", completed)
-        return ("", "")
+        return ("", "", completed.returncode, completed.stderr.decode(errors='ignore'))
     # Optionally do git diff at the same time (Saving code needs to be added)
     # git_diff = run_in_shell(f"git diff {commit}^ {commit}", cwd=cwd).stdout.decode(errors='ignore')
     completed = run_in_shell("git checkout FETCH_HEAD -- " + new_file, cwd=cwd)
@@ -45,15 +45,19 @@ def get_file_contents(commit, old_file, new_file, repo, cwd=None):
     # If there's only a new file, but no old file
     if completed.returncode != 0:
         #print("ERRORC2", completed)
-        return (new_contents, "")
+        return (new_contents, "", completed.returncode, completed.stderr.decode(errors='ignore'))
     old_contents = run_in_shell("cat " + old_file, cwd=cwd).stdout.decode(errors='ignore')
-    return (new_contents, old_contents)
+    return (new_contents, old_contents, completed.returncode, completed.stderr.decode(errors='ignore'))

 def get_diff(ex):
     commit_id = ex["commit"]
     repos = list(set(ex["repos"].split(",")))
     old_file = ex["old_file"]
     new_file = ex["new_file"]
+    # Initialize
+    returncode = 0
+    stderr = "unknown"
+
     for i, repo in enumerate(repos):
         repo = "https://xxx:[email protected]/" + repo + ".git"
         # Create a random directory to store the repo
@@ -61,7 +65,7 @@ def get_diff(ex):
         # Can take very long when running many processes
         run_in_shell("mkdir " + random_dir, timeout=300)
         try:
-            new_contents, old_contents = get_file_contents(commit_id, old_file, new_file, repo, cwd=random_dir)
+            new_contents, old_contents, returncode, stderr = get_file_contents(commit_id, old_file, new_file, repo, cwd=random_dir)
         except Exception as e:
             #print("ERROR", commit_id, old_file, new_file, repo, str(random_dir), e)
             # Break in case of many repos that all lead us nowhere
@@ -72,10 +76,14 @@ def get_diff(ex):
         run_in_shell("rm -rf " + random_dir) # clean up again
         ex["new_contents"] = new_contents
         ex["old_contents"] = old_contents
+        ex["returncode"] = returncode
+        ex["stderr"] = stderr
         return ex
     # If no repo worked
     ex["new_contents"] = ""
     ex["old_contents"] = ""
+    ex["returncode"] = returncode
+    ex["stderr"] = stderr
     return ex

 def get_diff_multi_threaded_processed(batch):
@@ -91,8 +99,8 @@ if __name__ == "__main__":

     ### OPTIONAL FILTERING ###
     #"""
-    java = tuple([".java"])
-    javascript = tuple([
+    java = [".java"]
+    javascript = [
         ".js",
         "._js",
         ".bones",
@@ -109,8 +117,8 @@ if __name__ == "__main__":
         ".ssjs",
         ".xsjs",
         ".xsjslib"
-    ])
-    python = tuple([
+    ]
+    python = [
         ".py",
         ".bzl",
         ".gyp",
@@ -121,24 +129,24 @@ if __name__ == "__main__":
         ".pyw",
         ".tac",
         ".wsgi",
-        ".xpy",
-    ])
+        ".xpy"
+    ]

     import json
     with open("programming-languages.json", "r") as f:
         extensions = json.load(f)
-    suffices = tuple([suffix for suffices in extensions.values() for suffix in suffices] )
+    suffices = tuple([suffix for suffices in extensions.values() for suffix in suffices])
     def filter_extension(ex):
         return ex["new_file"].endswith(suffices)
-
+
     def filter_extension_python(ex):
         return ex["new_file"].endswith(python)
-
+
     def filter_update(ex):
         return ex["message"] != "Update " + ex["old_file"]

     filter_msg = ["initial commit", "please\n", "please", "lalala"]
-
+
     def filter_misc(ex):
         return ex["message"] not in filter_msg

@@ -151,7 +159,7 @@ if __name__ == "__main__":
     #ds = ds.filter(filter_extension_python, num_proc=NUM_PROC)
     #print("After Python filter", len(ds))
     ds = ds.filter(filter_misc, num_proc=NUM_PROC)
-    print("After Misc filter", len(ds))
+    print("After Misc filter", len(ds))
     #ds = ds.select(range(DEBUG_SIZE))
     START = 0 # Modify for each instance (0 - 7)
     samples_per_instance = 64 * 64 * 64 * 32 # 8_388_608
@@ -159,17 +167,20 @@ if __name__ == "__main__":
     select_end = START * samples_per_instance + samples_per_instance
     ds = ds.select(range(select_start, select_end))
     print(f"Going from {select_start} till {select_end}")
-
+
     #"""
     ### END FILTERING ###
-
+
    def run_multi_processing_threading():
        ds.map(get_diff_multi_threaded_processed, num_proc=NUM_PROC, batch_size=NUM_THREADS, batched=True).to_json(f"diffs_{select_start}_{select_end}.jsonl", num_proc=NUM_PROC)

+    # Benchmarking
     #NUM_TRIALS = 1
     #print(f"Timing multithreading + multiprocessing using {NUM_THREADS} threads and {NUM_PROC} processes")
     #time = timeit.timeit(stmt=run_multi_processing_threading, number=NUM_TRIALS)
     #print("Time:", time)
     #with open("mpt.txt", "w") as f:
-    # f.write(str(time))
-    run_multi_processing_threading()
+    # f.write(str(time))
+
+    # Running
+    run_multi_processing_threading()
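
For reference, the run_in_shell helper that both hunks call is defined outside the changed region. Judging from how it is used (.returncode checks, .stdout/.stderr decoding, cwd= and timeout= keywords), it is presumably a thin wrapper over subprocess.run; a minimal sketch under that assumption, not the file's actual definition:

import subprocess

def run_in_shell(cmd, cwd=None, timeout=None):
    # Assumed shape: run the command through the shell, capture output,
    # and hand back the CompletedProcess so callers can inspect
    # .returncode and decode .stdout / .stderr themselves.
    return subprocess.run(cmd, shell=True, cwd=cwd, timeout=timeout, capture_output=True)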
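
Likewise, get_diff_multi_threaded_processed appears only by name here. Given that ds.map is invoked with batched=True and batch_size=NUM_THREADS, it presumably unpacks each columnar batch and fans the rows out across a thread pool, since the work is subprocess/IO-bound; a hedged sketch, not the file's actual body:

from concurrent.futures import ThreadPoolExecutor

def get_diff_multi_threaded_processed(batch):
    # Columnar batch (dict of lists) -> one dict per row
    rows = [dict(zip(batch.keys(), vals)) for vals in zip(*batch.values())]
    with ThreadPoolExecutor(max_workers=len(rows) or 1) as pool:
        results = list(pool.map(get_diff, rows))
    # Back to the columnar format a batched map must return
    return {key: [row[key] for row in results] for key in results[0]}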
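
One side effect of dropping the tuple(...) wrappers around java, javascript, and python is worth noting: str.endswith accepts a single string or a tuple of strings, but not a list, so the commented-out filter_extension_python would raise a TypeError if re-enabled. filter_extension is unaffected because suffices is still built as a tuple:

# str.endswith takes a str or a tuple of str, never a list
"example.py".endswith((".py", ".pyw"))      # True
# "example.py".endswith([".py", ".pyw"])    # would raise TypeError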
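
Because returncode and stderr now travel with every emitted row, failed fetches can be triaged after a run without rerunning git. A minimal sketch using the standard datasets JSON loader; the shard name matches what the script writes for START = 0:

from datasets import load_dataset

# Load one output shard written by run_multi_processing_threading()
ds = load_dataset("json", data_files="diffs_0_8388608.jsonl", split="train")

# Rows where the git fetch/checkout failed, and why
failures = ds.filter(lambda ex: ex["returncode"] != 0)
print(len(failures), "failed rows")
if len(failures):
    print(failures[0]["stderr"])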