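"""Preprocess the `images/` folder: filter it against the `images.tsv` metadata
(re-exported as `images.csv`), embed every image with UForm, and store the
embeddings (`images.fbin`), Base64 data URIs (`images.txt`), and a USearch
index (`images.usearch`)."""
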
from os import PathLike, listdir, remove
from os.path import isfile, join, exists
from mimetypes import guess_type
from base64 import b64encode

import pandas as pd
import numpy as np
from PIL import Image
from PIL import ImageFile
from tqdm import tqdm

from uform import get_model
from usearch.index import Index, MetricKind
from usearch.io import save_matrix, load_matrix

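# Let PIL open images from truncated files instead of raising an error.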
ImageFile.LOAD_TRUNCATED_IMAGES = True

def is_image(path: PathLike) -> bool:
    """Check that the path points to a file that PIL can open as an image."""
    if not isfile(path):
        return False
    try:
        Image.open(path)
        return True
    except Exception:
        return False

def image_to_data(path: PathLike) -> str:
    """Convert a file (specified by a path) into a data URI."""
    if not exists(path):
        raise FileNotFoundError(path)
    mime, _ = guess_type(path)
    with open(path, 'rb') as fp:
        data = fp.read()
    data64 = b64encode(data).decode('utf-8')
    return f'data:{mime};base64,{data64}'

def trim_extension(filename: str) -> str:
    """Strip the trailing extension: 'photo.jpg' -> 'photo'."""
    return filename.rsplit('.', 1)[0]

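# Collect the identifiers of every decodable image in the `images/` directory.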
names = sorted(f for f in listdir('images') if is_image(join('images', f)))
names = [trim_extension(f) for f in names]

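# Load the metadata table, keep only the rows that match an image on disk,
# and re-export it as `images.csv` sorted by `photo_id`.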
table = (pd.read_table('images.tsv') if exists('images.tsv')
         else pd.read_csv('images.csv'))
table = table[table['photo_id'].isin(names)]
table = table.sort_values('photo_id')
table = table.reset_index(drop=True)
table.to_csv('images.csv', index=False)

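# Queue for deletion any image that has no metadata row; from here on
# `names` follows the row order of the filtered table.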
names = list(set(table['photo_id']).intersection(names))
names_to_delete = [
    f for f in listdir('images') if trim_extension(f) not in names]
names = list(table['photo_id'])

if len(names_to_delete) > 0:
    print(f'Deleting {len(names_to_delete)} images without metadata')
    for name in names_to_delete:
        remove(join('images', name))

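# Embed every image with the multilingual UForm model and persist the
# resulting matrix of vectors to `images.fbin`.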
if not exists('images.fbin'):
    model = get_model('unum-cloud/uform-vl-multilingual')
    vectors = []

    for name in tqdm(names, desc='Vectorizing images'):
        image = Image.open(join('images', name + '.jpg'))
        image_data = model.preprocess_image(image)
        image_embedding = model.encode_image(image_data).detach().numpy()
        vectors.append(image_embedding)

    image_mat = np.vstack(vectors)
    save_matrix(image_mat, 'images.fbin')

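# Serialize every image as a Base64 data URI, one per line, into `images.txt`.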
if not exists('images.txt'):
    datas = []
    for name in tqdm(names, desc='Encoding images'):
        data = image_to_data(join('images', name + '.jpg'))
        datas.append(data)

    with open('images.txt', 'w') as f:
        f.write('\n'.join(datas))

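# Build a USearch index over the embeddings with cosine distance and save
# it to `images.usearch`.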
if not exists('images.usearch'):
    image_mat = load_matrix('images.fbin')
    count = image_mat.shape[0]
    ndim = image_mat.shape[1]
    index = Index(ndim=ndim, metric=MetricKind.Cos)

    for idx in tqdm(range(count), desc='Indexing vectors'):
        index.add(idx, image_mat[idx, :].flatten())

    index.save('images.usearch')