#!/usr/bin/env python3
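"""Prepare the `images/` folder for semantic search.

The script keeps only the images that have a row in the metadata table,
rewrites `images.csv`, embeds every image with UForm, serializes the images
as base64 data URIs, and builds a USearch cosine-similarity index.
"""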
from os import PathLike, listdir, remove
from os.path import isfile, join, exists
from mimetypes import guess_type
from base64 import b64encode, b64decode
from io import BytesIO
import re
import pandas as pd
import numpy as np
from PIL import Image
from PIL import ImageFile
from tqdm import tqdm
from uform import get_model_onnx
from usearch.index import Index, MetricKind
from usearch.io import save_matrix, load_matrix
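
# Allow Pillow to open partially downloaded / truncated files instead of raising.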
ImageFile.LOAD_TRUNCATED_IMAGES = True

def is_image(path: PathLike) -> bool:
    """Return True if `path` is a file that Pillow can parse as an image."""
    if not isfile(path):
        return False
    try:
        with Image.open(path):
            return True
    except Exception:
        return False

def image_to_data(path: PathLike) -> str:
    """Convert a file (specified by a path) into a data URI."""
    if not exists(path):
        raise FileNotFoundError(path)
    mime, _ = guess_type(path)
    mime = mime or "application/octet-stream"  # fall back when the type cannot be guessed
    with open(path, "rb") as fp:
        data = fp.read()
    data64 = b64encode(data).decode("utf-8")
    return f"data:{mime};base64,{data64}"

def data_to_image(data_uri: str) -> Image.Image:
"""Convert a base64-encoded data URI to a Pillow Image."""
base64_str = re.search(r"base64,(.*)", data_uri).group(1)
image_data = b64decode(base64_str)
image = Image.open(BytesIO(image_data))
return image
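
# Example round-trip (hypothetical path): the data URI produced by
# `image_to_data("images/photo.jpg")` can be decoded back with `data_to_image`.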

def trim_extension(filename: str) -> str:
    """Drop the file extension: "photo.jpg" -> "photo"."""
    return filename.rsplit(".", 1)[0]
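
# Match the files on disk with the metadata table: keep only photo_ids that
# exist in both, sort them, and rewrite images.csv with the surviving rows.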
names = sorted(f for f in listdir("images") if is_image(join("images", f)))
names = [trim_extension(f) for f in names]
table = (
    pd.read_table("images.tsv") if exists("images.tsv") else pd.read_csv("images.csv")
)
table = table[table["photo_id"].isin(names)]
table = table.sort_values("photo_id")
table = table.reset_index(drop=True)
table.to_csv("images.csv", index=False)
kept = set(table["photo_id"])
names_to_delete = [f for f in listdir("images") if trim_extension(f) not in kept]
names = list(table["photo_id"])
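
# Delete image files that have no corresponding metadata row.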
if len(names_to_delete) > 0:
print(f"Plans to delete: {len(names_to_delete)} images without metadata")
for name in names_to_delete:
remove(join("images", name))
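
# Embed every image with the UForm ONNX vision encoder and cache the
# embedding matrix as images.fbin (skipped when the file already exists).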
if not exists("images.fbin") and 0:
model, processor = get_model_onnx(
"unum-cloud/uform-vl-english-small",
device="cpu",
dtype="fp32",
)
vectors = []
for name in tqdm(names, desc="Vectorizing images"):
image = Image.open(join("images", name + ".jpg"))
image_data = processor.preprocess_image(image)
image_embedding = model.encode_image(image_data)
vectors.append(image_embedding)
image_mat = np.vstack(vectors)
save_matrix(image_mat, "images.fbin")
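
# Serialize every image as a base64 data URI, one per line, in the same order as `names`.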
if not exists("images.base64.txt"):
datas = []
for name in tqdm(names, desc="Encoding images"):
data = image_to_data(join("images", name + ".jpg"))
datas.append(data)
with open("images.base64.txt", "w") as f:
f.write("\n".join(datas))
if not exists("images.names.txt"):
with open("images.names.txt", "w") as f:
f.write("\n".join(names))
if not exists("images.usearch"):
image_mat = load_matrix("images.fbin")
count = image_mat.shape[0]
ndim = image_mat.shape[1]
index = Index(ndim=ndim, metric=MetricKind.Cos)
for idx in tqdm(range(count), desc="Indexing vectors"):
index.add(idx, image_mat[idx, :].flatten())
index.save("images.usearch")