Commit 9a64f12
Parent(s): f92f202
Update app.py
app.py CHANGED
@@ -10,10 +10,9 @@ import requests
 
 model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
 processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
-df =
-
-
-embeddings[k] = np.divide(embeddings[k], np.sqrt(np.sum(embeddings[k]**2, axis=1, keepdims=True)))
+df = pd.read_csv('data2.csv')
+embeddings_npy = np.load('embeddings.npy')
+embeddings = np.divide(embeddings_npy, np.sqrt(np.sum(embeddings_npy**2, axis=1, keepdims=True)))
 
 def compute_text_embeddings(list_of_strings):
     inputs = processor(text=list_of_strings, return_tensors="pt", padding=True)
@@ -27,13 +26,10 @@ def download_img(path):
     return local_path
 
 def predict(query):
-    corpus = 'Movies'
     n_results=3
-
     text_embeddings = compute_text_embeddings([query]).detach().numpy()
-
-
-    paths = [download_img(df[k].iloc[i]['path']) for i in results]
+    results = np.argsort((embeddings@text_embeddings.T)[:, 0])[-1:-n_results-1:-1]
+    paths = [download_img(df.iloc[i]['path']) for i in results]
     print(paths)
     return paths
 
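For context, a minimal runnable sketch of the single-corpus retrieval flow this commit switches app.py to: load the metadata CSV and precomputed image embeddings, L2-normalize the image embeddings once, embed the query with the same CLIP checkpoint, and rank images by dot product. The file names 'data2.csv' and 'embeddings.npy' and the normalization/ranking expressions come from the diff; the body of compute_text_embeddings beyond its tokenization line, the embedding shapes, and the 'path' column are assumptions, and the real app additionally fetches each matched image with its download_img helper before returning.

# Illustrative sketch only (assumption: standalone script, not the Space's actual app.py).
import numpy as np
import pandas as pd
from transformers import CLIPModel, CLIPProcessor

model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

# Metadata and precomputed image embeddings, as loaded in the new version of app.py.
df = pd.read_csv("data2.csv")           # assumed to contain a 'path' column per image
embeddings = np.load("embeddings.npy")  # assumed shape: (n_images, embedding_dim)

# L2-normalize each image embedding so the dot product below behaves like cosine similarity.
embeddings = embeddings / np.sqrt(np.sum(embeddings**2, axis=1, keepdims=True))

def compute_text_embeddings(list_of_strings):
    # Tokenize the query and project it with CLIP's text encoder
    # (the get_text_features call is an assumption based on the standard transformers API).
    inputs = processor(text=list_of_strings, return_tensors="pt", padding=True)
    return model.get_text_features(**inputs)

def predict(query, n_results=3):
    text_embeddings = compute_text_embeddings([query]).detach().numpy()
    # Score every image against the query and keep the indices of the n_results best matches.
    scores = (embeddings @ text_embeddings.T)[:, 0]
    results = np.argsort(scores)[-1:-n_results - 1:-1]
    return [df.iloc[i]["path"] for i in results]

Sorting the raw dot products is enough for ranking: the query embedding's norm is the same positive constant for every image, so the order matches cosine similarity even though only the image embeddings are normalized.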