File size: 2,398 Bytes
9b0f4a0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import tempfile
import os
import time
import hashlib
import shutil

# Root directory for all cache entries, e.g. /tmp/cache on Linux.
cache_dir = os.path.join(tempfile.gettempdir(), "cache")
os.makedirs(cache_dir, exist_ok=True)
# Name of the per-entry file holding the entry's last-update timestamp.
time_filename = "update_time"
# Maximum number of cache entry directories kept by remove_extra().
max_cache = 5


def deterministic_hash(obj):
    """Return a stable 20-hex-character digest of str(obj).

    Unlike the built-in hash(), the result does not vary between
    interpreter runs, so it is safe as an on-disk cache key.
    """
    digest = hashlib.sha256(str(obj).encode()).hexdigest()
    return digest[:20]


def get_dirs():
    """Return the full paths of all subdirectories of cache_dir."""
    candidates = (os.path.join(cache_dir, name) for name in os.listdir(cache_dir))
    return [path for path in candidates if os.path.isdir(path)]


def get_time(dir):
    """Return the last-update timestamp recorded in *dir*'s time file.

    Reads the float stored in the ``update_time`` file inside *dir*.
    If the file is missing, returns 0.0 so the entry sorts as the
    oldest and is evicted first by remove_extra(), which removes the
    directory with the *minimum* time.

    Raises:
        ValueError: if the time file exists but its contents are not a
            valid float; remove_extra() relies on this to purge
            corrupt cache entries.
    """
    timefile = os.path.join(dir, time_filename)
    try:
        # Context manager closes the handle promptly; the original
        # bare open(...).read() leaked it until garbage collection.
        with open(timefile, encoding="utf-8") as f:
            return float(f.read())
    except FileNotFoundError:
        # 0.0 means "infinitely old", so this entry is evicted first.
        # The previous float("inf") did the opposite of the stated
        # intent: argmin-based eviction removed it *last*.
        return 0.0


def write_time(dir):
    """Stamp *dir* with the current wall-clock time.

    Overwrites (or creates) the ``update_time`` file inside *dir*
    with time.time() rendered as a decimal string, no trailing
    newline.
    """
    timefile = os.path.join(dir, time_filename)
    # with-block flushes and closes deterministically; the original
    # print(..., file=open(...)) left the handle to the GC, so the
    # write could land arbitrarily late.
    with open(timefile, "w", encoding="utf-8") as f:
        f.write(str(time.time()))


def argmin(iterable):
    """Return the index of the smallest item in *iterable*.

    Ties resolve to the earliest index, matching min()'s behavior.

    Raises:
        ValueError: if the iterable is empty.
    """
    it = iter(iterable)
    try:
        best_val = next(it)
    except StopIteration:
        raise ValueError("argmin() arg is an empty sequence") from None
    best_idx = 0
    for idx, val in enumerate(it, start=1):
        if val < best_val:
            best_idx, best_val = idx, val
    return best_idx


def remove_extra():
    """Prune the cache directory down to at most max_cache entries.

    First removes any cache entry whose time file is unreadable or
    corrupt, then evicts the least-recently-updated entries until no
    more than max_cache remain.
    """
    # Drop entries whose timestamp cannot be read or parsed.
    # get_dirs() already guarantees only directories are returned, so
    # the old `if not os.path.isdir(dir): os.remove(dir)` branch was
    # dead code and has been removed.
    for dir in get_dirs():
        try:
            get_time(dir)
        except (ValueError, OSError):
            # Narrowed from BaseException: only a corrupt time file
            # (ValueError) or an I/O failure is expected here, and we
            # must not swallow KeyboardInterrupt/SystemExit.
            shutil.rmtree(dir)
    # Evict the oldest entry repeatedly until within the cap.
    while True:
        dirs = get_dirs()
        if len(dirs) <= max_cache:
            break
        times = [get_time(dir) for dir in dirs]
        shutil.rmtree(dirs[argmin(times)])


def is_cached(hash_key):
    """Return True if a cache entry exists for *hash_key*."""
    return os.path.exists(os.path.join(cache_dir, hash_key))


def create_cache(hash_key):
    """Create (or refresh) the cache entry directory for *hash_key*.

    Ensures the directory exists, then stamps it with the current
    time so eviction ordering treats it as freshly used.
    """
    entry_dir = os.path.join(cache_dir, hash_key)
    os.makedirs(entry_dir, exist_ok=True)
    write_time(entry_dir)


def load_paragraph(hash_key, hash_key_paragraph):
    """Return the cached paragraph text, or None if not cached.

    Looks up the file cache_dir/<hash_key>/<hash_key_paragraph> and
    returns its full contents as a string.
    """
    filename = os.path.join(cache_dir, hash_key, hash_key_paragraph)
    if not os.path.exists(filename):
        return None
    # Context manager closes the handle promptly; the original bare
    # open(...).read() leaked it until garbage collection.
    with open(filename, encoding="utf-8") as f:
        return f.read()


def write_paragraph(hash_key, hash_key_paragraph, paragraph):
    """Store *paragraph* under cache_dir/<hash_key>/<hash_key_paragraph>.

    Writes str(paragraph) verbatim with no trailing newline (matching
    the previous print(..., end="") behavior). Assumes the entry
    directory already exists — presumably via create_cache(hash_key);
    TODO confirm callers guarantee this.
    """
    filename = os.path.join(cache_dir, hash_key, hash_key_paragraph)
    # with-block guarantees flush/close; the original
    # print(..., file=open(...)) left the handle to the GC.
    with open(filename, "w", encoding="utf-8") as f:
        f.write(str(paragraph))