"""Compute SHA-256 checksums for every file under ``train_valid_data`` and
publish the resulting CSV to a HuggingFace dataset repository.

Output CSV schema: ``checksum,file_path`` with one row per regular file.
"""

import hashlib
import os
import pathlib

# Where the data lives and where the checksum manifest is written.
DATA_DIR = pathlib.Path("train_valid_data")
CSV_PATH = "sha256_checksums.csv"


def get_checksum(file_path):
    """Return one CSV row ``"<hex_digest>,<file_path>\\n"`` for *file_path*.

    Uses :mod:`hashlib` directly instead of shelling out to ``sha256sum`` —
    portable, no subprocess overhead, and immune to the double-space
    separator in ``sha256sum`` output (which the old ``.replace(" ", ",")``
    turned into a malformed ``hash,,path`` row, also corrupting any path
    that itself contained spaces).
    """
    digest = hashlib.sha256()
    with open(file_path, "rb") as fh:
        # Read in fixed-size chunks so arbitrarily large files fit in memory.
        for chunk in iter(lambda: fh.read(1 << 16), b""):
            digest.update(chunk)
    return f"{digest.hexdigest()},{file_path}\n"


def collect_files(folder):
    """Return every regular file (recursively) under *folder* as a list."""
    return [p for p in pathlib.Path(folder).rglob("*") if not p.is_dir()]


def write_checksum_csv(files, csv_path=CSV_PATH):
    """Write the ``checksum,file_path`` manifest for *files* to *csv_path*."""
    with open(csv_path, "w") as out:
        out.write("checksum,file_path\n")
        for file_path in files:
            out.write(get_checksum(file_path))


def main():
    """Build the checksum manifest and upload it as a public dataset file."""
    # Third-party imports are local so the hashing helpers above stay
    # importable (and testable) without tqdm / huggingface_hub installed.
    from tqdm import tqdm
    from huggingface_hub import HfApi

    all_files = collect_files(DATA_DIR)
    with open(CSV_PATH, "w") as out:
        out.write("checksum,file_path\n")
        for file_path in tqdm(all_files):
            out.write(get_checksum(file_path))

    # NOTE(review): the token was previously hard-coded in source — a leaked
    # credential. Read it from the environment instead (HfApi also falls
    # back to the cached login when token is None).
    api = HfApi()
    api.upload_file(
        path_or_fileobj=CSV_PATH,
        path_in_repo=CSV_PATH,
        repo_id="osbm/project-checksums",
        repo_type="dataset",
        token=os.environ.get("HF_TOKEN"),
    )


if __name__ == "__main__":
    main()