Update src/app.py
Samuel Schmidt committed · Commit 9a5647a
1 Parent(s): 5a5b371
src/app.py +7 -7

src/app.py CHANGED
@@ -65,7 +65,7 @@ def get_neighbors(query_image, selected_descriptor, top_k=5):
     Returns:
         A list of the top_k most similar images as PIL objects.
     """
-    if "Color Descriptor"
+    if "Color Descriptor" == selected_descriptor:
         cd = ColorDescriptor((8, 12, 3))
         qi_embedding = cd.describe(query_image)
         qi_np = np.array(qi_embedding)
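The equality check introduced here only works if selected_descriptor arrives as a single string. A gr.CheckboxGroup hands its callback a list of selected labels, so comparing that list against one label is never true, while a gr.Dropdown hands over the chosen label itself. A minimal illustration (the values below are made up for the example, not taken from the app):

# Illustration only: what each Gradio component passes to the callback.
from_checkbox_group = ["CLIP"]   # gr.CheckboxGroup -> list of selected labels
from_dropdown = "CLIP"           # gr.Dropdown -> the selected label as a plain string

print("CLIP" == from_checkbox_group)  # False: a string never equals a list
print("CLIP" in from_checkbox_group)  # True: membership is how a list would be checked
print("CLIP" == from_dropdown)        # True: equality matches the dropdown value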
@@ -73,14 +73,14 @@ def get_neighbors(query_image, selected_descriptor, top_k=5):
             'color_embeddings', qi_np, k=top_k)
         images = retrieved_examples['image']  # retrieved_examples is a dict with images and embeddings
         return images
-    if "CLIP"
+    if "CLIP" == selected_descriptor:
         clip_model = CLIPImageEncoder()
         qi_embedding = clip_model.encode_image(query_image)
         scores, retrieved_examples = dataset_with_embeddings.get_nearest_examples(
             'clip_embeddings', qi_embedding, k=top_k)
         images = retrieved_examples['image']
         return images
-    if "LBP"
+    if "LBP" == selected_descriptor:
         lbp_model = LBPImageEncoder(8,2)
         qi_embedding = lbp_model.describe(query_image)
         scores, retrieved_examples = dataset_with_embeddings.get_nearest_examples(
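get_nearest_examples comes from the Hugging Face datasets library and only works after a FAISS index has been added for the named column. The load_cbir_dataset function that prepares dataset_with_embeddings is not part of this diff, so the following is only a sketch of one plausible way to build such an index; the ColorDescriptor import path, the split slice, and the helper name are assumptions, and only the 'color_embeddings' and 'clip_embeddings' index names appear in the code above.

# Sketch under assumptions: how dataset_with_embeddings could be prepared so that
# get_nearest_examples('color_embeddings', ...) has an index to query. The real
# load_cbir_dataset is not shown in this commit.
import numpy as np
from datasets import load_dataset
from src.color_descriptor import ColorDescriptor  # project-local class; import path is an assumption

cd = ColorDescriptor((8, 12, 3))
dataset = load_dataset("huggan/CelebA-faces", split="train[:1000]")  # small slice for illustration

def add_color_embedding(example):
    # Store one embedding per image under the column that get_neighbors queries later.
    example["color_embeddings"] = cd.describe(example["image"])
    return example

dataset_with_embeddings = dataset.map(add_color_embedding)
dataset_with_embeddings.add_faiss_index(column="color_embeddings")

# Querying mirrors the calls in get_neighbors above.
query_image = dataset_with_embeddings[0]["image"]
query_vec = np.array(cd.describe(query_image), dtype=np.float32)
scores, retrieved_examples = dataset_with_embeddings.get_nearest_examples(
    "color_embeddings", query_vec, k=5)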
@@ -117,14 +117,14 @@ with gr.Blocks() as demo:
         """)
 
     with gr.Row():
+        descr_dropdown = gr.Dropdown(["Color Descriptor", "LBP", "CLIP"], value="LBP", label="Please choose a descriptor")
         checkboxes_descr = gr.CheckboxGroup(["Color Descriptor", "LBP", "CLIP"], label="Please choose a descriptor")
         dataset_dropdown = gr.Dropdown(
             ["huggan/CelebA-faces", "EIT/cbir-eit"],
-            value=
+            value="huggan/CelebA-faces",
+            label="Please select a dataset"
         )
-
-    btn_index.click(load_cbir_dataset, inputs=[dataset_dropdown])
-    btn.click(get_neighbors, inputs=[image_input, checkboxes_descr], outputs=[gallery_output])
+    btn.click(get_neighbors, inputs=[image_input, descr_dropdown], outputs=[gallery_output])
 
     # gr.Markdown(
     #     """
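The click wiring above refers to image_input, btn and gallery_output, which are defined earlier in app.py and therefore do not appear in this hunk. The sketch below shows one way those pieces typically fit around the new descr_dropdown; the component choices and labels are assumptions, and only the dropdown line and the btn.click call are taken from the diff.

# Sketch under assumptions: a minimal Blocks layout around the wiring shown in the hunk.
# image_input, btn and gallery_output exist elsewhere in app.py; their exact form here is guessed.
# get_neighbors is the function changed in the first two hunks of this commit.
import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():
        descr_dropdown = gr.Dropdown(["Color Descriptor", "LBP", "CLIP"], value="LBP",
                                     label="Please choose a descriptor")
        image_input = gr.Image(type="pil")  # query image handed to get_neighbors
    btn = gr.Button("Find similar images")
    gallery_output = gr.Gallery()  # displays the list of PIL images returned by get_neighbors

    # The dropdown passes a single string, which matches the == checks in get_neighbors.
    btn.click(get_neighbors, inputs=[image_input, descr_dropdown], outputs=[gallery_output])

demo.launch()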