from concurrent.futures import ThreadPoolExecutor
import os
import random
import subprocess
import timeit

import datasets

"""Example of the git commands used to retrieve one file before and after a commit:

git init
git remote add origin https://github.com/huggingface/evaluate.git
git fetch --depth 2 origin 9b056cdd5eb95459ae80142014865263e7dd75b8
# Get file after change
git checkout FETCH_HEAD -- README.md
# Get file before change
git checkout FETCH_HEAD^ -- README.md
"""

# Tunables: threads per process, number of worker processes, and the size of
# the debug subset used by the commented-out filtering block below.
NUM_THREADS = 16
NUM_PROC = 64
DEBUG_SIZE = 4096

CWD = os.getcwd()


def run_in_shell(cmd: str, cwd=None, timeout=60):
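    """Run `cmd` through the shell, capturing stdout/stderr; raises subprocess.TimeoutExpired on hang."""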
    return subprocess.run([cmd], capture_output=True, shell=True, cwd=cwd, timeout=timeout)


def get_file_contents(commit, old_file, new_file, repo, cwd=None):
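    """Return (new_contents, old_contents) of a file touched by `commit`.

    A depth-2 fetch makes FETCH_HEAD the commit itself and FETCH_HEAD^ its
    parent; whatever cannot be checked out comes back as an empty string.
    """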
    run_in_shell("git init", cwd=cwd)
    run_in_shell("git remote add origin " + repo, cwd=cwd)
    completed = run_in_shell("git fetch --depth 2 origin " + commit, cwd=cwd)
    if completed.returncode != 0:
        return ("", "")
    run_in_shell("git checkout FETCH_HEAD -- " + new_file, cwd=cwd)
    new_contents = run_in_shell("cat " + new_file, cwd=cwd).stdout.decode(errors="ignore")
    completed = run_in_shell("git checkout FETCH_HEAD^ -- " + old_file, cwd=cwd)
    if completed.returncode != 0:
        return (new_contents, "")
    old_contents = run_in_shell("cat " + old_file, cwd=cwd).stdout.decode(errors="ignore")
    return (new_contents, old_contents)


def get_diff(ex):
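    """Fill ex["new_contents"] / ex["old_contents"] for a single commit record.

    "repos" is a comma-separated list of forks that contain the commit; each
    candidate is tried in a scratch directory until one yields the contents.
    """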
    commit_id = ex["commit"]
    repos = list(set(ex["repos"].split(",")))
    old_file = ex["old_file"]
    new_file = ex["new_file"]
    for i, repo in enumerate(repos):
        # Dummy credentials stop git from blocking on an auth prompt for
        # private or deleted repositories.
        repo = "https://xxx:[email protected]/" + repo + ".git"
        # Unique scratch directory so parallel workers do not collide.
        random_dir = CWD + "/" + str(random.randint(0, 1000000))
        run_in_shell("mkdir " + random_dir, timeout=300)
        try:
            new_contents, old_contents = get_file_contents(commit_id, old_file, new_file, repo, cwd=random_dir)
        except Exception:
            # Give up on this commit after too many failing forks; otherwise try the next one.
            if i > 10:
                break
            continue
        finally:
            run_in_shell("rm -rf " + random_dir)
        ex["new_contents"] = new_contents
        ex["old_contents"] = old_contents
        return ex
    ex["new_contents"] = ""
    ex["old_contents"] = ""
    return ex


def get_diff_multi_threaded_processed(batch):
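    """Run get_diff over one batch, fanning out to NUM_THREADS threads.

    `batch` is a dict of columns, e.g. {"commit": [c1, c2], "old_file": [f1, f2], ...};
    it is transposed into per-example dicts like {"commit": c1, "old_file": f1, ...}
    for the threads, then re-assembled column-wise for `datasets`.
    """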
    with ThreadPoolExecutor(max_workers=NUM_THREADS) as executor:
        results = list(executor.map(get_diff, [dict(zip(batch, t)) for t in zip(*batch.values())]))
    return {k: [dic[k] for dic in results] for k in results[0]}


if __name__ == "__main__":
    ds = datasets.load_dataset("./github-commits", use_auth_token=True)["train"]
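    # Columns used below: "commit", "repos" (comma-separated fork list),
    # "old_file", "new_file", and "message".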

    # Filtering pipeline, kept for reference but currently disabled:
    """
    java = [".java"]
    javascript = [
        ".js",
        "._js",
        ".bones",
        ".es6",
        ".jake",
        ".jsb",
        ".jscad",
        ".jsfl",
        ".jsm",
        ".jss",
        ".njs",
        ".pac",
        ".sjs",
        ".ssjs",
        ".xsjs",
        ".xsjslib",
    ]
    python = [
        ".py",
        ".bzl",
        ".gyp",
        ".lmi",
        ".pyde",
        ".pyp",
        ".pyt",
        ".pyw",
        ".tac",
        ".wsgi",
        ".xpy",
    ]

    import json
    with open("programming-languages.json", "r") as f:
        extensions = json.load(f)
    suffices = [suffix for suffices in extensions.values() for suffix in suffices]

    def filter_extension(ex):
        splits = ex["new_file"].split(".")
        if len(splits) == 1:
            return False
        return "." + splits[-1] in suffices

    def filter_extension_python(ex):
        splits = ex["new_file"].split(".")
        if len(splits) == 1:
            return False
        return "." + splits[-1] in python

    def filter_update(ex):
        return ex["message"] != "Update " + ex["old_file"]

    filter_msg = ["initial commit", "please\n", "please"]

    def filter_misc(ex):
        return ex["message"] not in filter_msg

    # Removes ~10M
    ds = ds.filter(filter_extension, num_proc=NUM_PROC)
    print("After Extension filter", len(ds))
    # Removes ~1M
    ds = ds.filter(filter_update, num_proc=NUM_PROC)
    print("After Update filter", len(ds))
    ds = ds.filter(filter_extension_python, num_proc=NUM_PROC)
    print("After Python filter", len(ds))
    ds = ds.filter(filter_misc, num_proc=NUM_PROC)
    print("After Misc filter", len(ds))
    ds = ds.select(range(DEBUG_SIZE))
    """

    def run_multi_processing_threading():
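        # Each of NUM_PROC worker processes maps one batch of NUM_THREADS
        # examples at a time; get_diff_multi_threaded_processed then fans the
        # batch out across its thread pool, so up to NUM_PROC * NUM_THREADS
        # git fetches run concurrently.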
        ds.map(
            get_diff_multi_threaded_processed,
            num_proc=NUM_PROC,
            batch_size=NUM_THREADS,
            batched=True,
        ).to_json("mpt.jsonl")

    NUM_TRIALS = 1
    print(f"Timing multithreading + multiprocessing using {NUM_THREADS} threads and {NUM_PROC} processes")
    time = timeit.timeit(stmt=run_multi_processing_threading, number=NUM_TRIALS)
    print("Time:", time)
    with open("mpt.txt", "w") as f:
        f.write(str(time))