Datasets:

Modalities:
Text
Formats:
csv
Libraries:
Datasets
pandas
File size: 1,104 Bytes
37effe8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7f10458
37effe8
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import os
import hashlib
import glob
import pathlib
from tqdm import tqdm
import subprocess


# Collect every non-directory entry (recursively) under the train/valid
# data folder; only these get checksummed below.
# NOTE(review): the original first ran glob.glob("./*") into all_files and
# immediately overwrote it with the rglob result — that dead call is dropped.
folder_path = pathlib.Path("train_valid_data")
print(folder_path)

all_files = [p for p in folder_path.rglob("*") if not p.is_dir()]
from pprint import pprint



def get_checksum(file_path):
    """Return the SHA-256 checksum of *file_path* as one CSV row.

    The row has the form ``"<hex digest>,<file_path>\n"``, matching what the
    previous implementation produced by parsing ``sha256sum`` output.

    The digest is computed with :mod:`hashlib` instead of spawning
    ``/usr/bin/sha256sum`` per file — portable (no dependency on a coreutils
    binary at a fixed path), no subprocess overhead, and immune to file
    names containing double spaces, which the old ``replace("  ", ",")``
    parsing would have mangled.
    """
    digest = hashlib.sha256()
    # Read in 1 MiB chunks so arbitrarily large files never need to fit
    # in memory at once.
    with open(file_path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            digest.update(chunk)
    return f"{digest.hexdigest()},{file_path}\n"

# Write one "checksum,file_path" row per collected file. get_checksum()
# already terminates each row with "\n", so no extra newline handling is
# needed here. Plain "w" replaces the original "w+": nothing is read back,
# and the encoding is made explicit for reproducible output.
with open("sha256_checksums.csv", "w", encoding="utf-8") as csv_file:
    csv_file.write("checksum,file_path\n")
    for file_path in tqdm(all_files):
        csv_file.write(get_checksum(file_path))


# NOTE(review): removed the unused `import huggingface as hh` — `hh` was
# never referenced, and the actually-used package is huggingface_hub below.
from huggingface_hub import HfApi

api = HfApi()
# Upload the checksum CSV to a public dataset repo so the results are
# shared in addition to the local file copy.
#
# SECURITY(review): the token was previously hard-coded as the literal
# placeholder "TOKEN", which can never authenticate and encourages pasting
# a real secret into source control. Read it from the environment instead;
# when HF_TOKEN is unset, huggingface_hub falls back to the locally cached
# login credentials.
api.upload_file(
    path_or_fileobj="sha256_checksums.csv",
    path_in_repo="sha256_checksums.csv",
    repo_id="osbm/project-checksums",
    repo_type="dataset",
    token=os.environ.get("HF_TOKEN"),
)