import sqlite3

import coremltools as ct
import numpy as np
from transformers import AutoTokenizer
from usearch.index import Index, Matches

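# Tokenize a string into fixed-length 512-token NumPy arrays, padded and
# truncated to match the input shape the converted Core ML model expects.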
def tokenize(text):
    return tokenizer(
        text,
        add_special_tokens=True,
        max_length=512,
        padding='max_length',
        truncation=True,
        return_attention_mask=True,
        return_tensors='np'
    )

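# Embed a string with the Core ML model. The inputs are cast to float32 for
# Core ML, and 'embeddings' is the model's output feature name.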
def embed(text):
    result = tokenize(text)
    token_ids = result['input_ids'].astype(np.float32)
    mask = result['attention_mask'].astype(np.float32)

    predictions = model.predict({"input_ids": token_ids, "attention_mask": mask})
    return predictions['embeddings'][0]

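# Simple English Wikipedia, 2024-08-01 dump: load the compiled, quantized
# Core ML embedding model and the tokenizer it was converted from.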
lang = "simple"
date = "20240801"
model = ct.models.CompiledMLModel('./msmarco_distilbert_base_tas_b_512_single_quantized.mlmodelc')
tokenizer = AutoTokenizer.from_pretrained("sentence-transformers/msmarco-distilbert-base-tas-b")

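# Restore the pre-built USearch index of f16 section embeddings; view=True
# memory-maps the file from disk rather than loading it all into RAM.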
precision = "f16"
index_path = f"./{lang}wiki-{date}.{precision}.index"
index = Index.restore(index_path, view=True)

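# The SQLite database maps index keys back to article titles and section text.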
db_name = f"wikipedia_{lang}_{date}.db"
conn = sqlite3.connect(db_name)
cursor = conn.cursor()

query = "what is the capital of AUS?"

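# Embed the query, pull the 5 nearest neighbors from the index, and print an
# excerpt of each matching section.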
print(f"🔎 testing search... '{query}'") |
|
qembed = embed(query) |
|
res: Matches = index.search(qembed, 5) |
|
print(f" - Results:") |
|
for result in res: |
|
(title, section, text) = cursor.execute("SELECT title, section_name, text FROM article_sections WHERE id = ?;", (f"{result.key}",)).fetchone() |
|
snippet = text[:280].replace("\n", " ") |
|
print(f" - Key: {result.key} | Distance: {result.distance} | Excerpt from '{title}', '{section}': {snippet}") |
|
|
|
conn.close() |
|
|