Datasets:
File size: 1,004 Bytes
d17161f 2018fe1 d17161f 2018fe1 9e31ba2 d17161f 9e31ba2 2018fe1 d17161f 2018fe1 d17161f 2018fe1 d17161f 2018fe1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
import json
import os
from pathlib import Path
import json5
import jsonschema
from tqdm.contrib.concurrent import process_map
def process_file(schema_file):
    """Validate one candidate schema file and mirror it into valid_data/.

    The file is parsed as JSON5, checked against its declared JSON Schema
    meta-schema, and — if both succeed — rewritten as normalized JSON
    (sorted keys, 2-space indent) at the same relative path under
    ``valid_data/``. Files that fail to parse or fail the meta-schema
    check are silently skipped (deliberate best-effort filtering).

    Parameters
    ----------
    schema_file : Path
        Path under the ``data/`` tree (the first path component is
        replaced with ``valid_data`` to build the destination).
    """
    # Mirror the source path under valid_data/, dropping the leading
    # "data" component.
    new_schema_file = Path("valid_data", *schema_file.parts[1:])
    # Skip any directories named with .json at the end (rglob("*.json")
    # matches directories too).
    if not schema_file.is_file() and not new_schema_file.is_file():
        return
    try:
        # Context manager closes the handle even when parsing fails
        # (the original leaked an open file object here).
        with open(schema_file) as src:
            schema = json5.load(src)
    except ValueError:
        # Not parseable as JSON5 — skip.
        return
    # Choose the validator class matching the schema's $schema dialect.
    vcls = jsonschema.validators.validator_for(schema)
    try:
        vcls.check_schema(schema)
    except jsonschema.exceptions.SchemaError:
        # Schema does not conform to its meta-schema — skip.
        return
    new_schema_file.parent.mkdir(parents=True, exist_ok=True)
    # Write via a context manager so the output is flushed and closed
    # deterministically, even inside worker processes.
    with open(new_schema_file, "w") as dst:
        json.dump(schema, dst, sort_keys=True, indent=2)
if __name__ == "__main__":
    # Collect every *.json path under data/ and validate them in
    # parallel worker processes (chunked to reduce IPC overhead).
    json_files = list(Path("data").rglob("*.json"))
    process_map(process_file, json_files, chunksize=10)
|