merve (HF staff) committed 29afcce (1 parent: 37254dd)

Create app.py

Files changed (1): app.py (new file, +49 −0)

app.py:
import torch  # not used directly; ensures the PyTorch backend is available for the pipelines
from transformers import pipeline
import gradio as gr

# Zero-shot image classification pipelines for the two models under comparison.
siglip_checkpoint = "nielsr/siglip-base-patch16-224"
clip_checkpoint = "openai/clip-vit-base-patch16"
siglip_detector = pipeline(model=siglip_checkpoint, task="zero-shot-image-classification")
clip_detector = pipeline(model=clip_checkpoint, task="zero-shot-image-classification")


def postprocess(output):
    # The pipeline returns a list of {"label": ..., "score": ...} dicts;
    # gr.Label expects a single {label: confidence} mapping.
    return {out["label"]: float(out["score"]) for out in output}


def infer(image, candidate_labels):
    # Split the comma-separated label string and trim surrounding whitespace.
    candidate_labels = [label.strip() for label in candidate_labels.split(",")]
    siglip_out = siglip_detector(image, candidate_labels=candidate_labels)
    clip_out = clip_detector(image, candidate_labels=candidate_labels)
    return postprocess(clip_out), postprocess(siglip_out)


with gr.Blocks() as demo:
    gr.Markdown("# Compare CLIP and SigLIP")
    gr.Markdown("Compare the performance of CLIP and SigLIP on zero-shot classification in this Space 👇")
    with gr.Row():
        with gr.Column():
            image_input = gr.Image(type="pil")
            text_input = gr.Textbox(label="Input a list of labels")
            run_button = gr.Button("Run")

        with gr.Column():
            clip_output = gr.Label(label="CLIP Output", num_top_classes=3)
            siglip_output = gr.Label(label="SigLIP Output", num_top_classes=3)

    examples = [["./baklava.jpg", "baklava, souffle, tiramisu"]]
    gr.Examples(
        examples=examples,
        inputs=[image_input, text_input],
        outputs=[clip_output, siglip_output],
        fn=infer,
        cache_examples=True,
    )
    run_button.click(
        fn=infer,
        inputs=[image_input, text_input],
        outputs=[clip_output, siglip_output],
    )

# Launch the UI when the script is run directly (the Spaces runtime may also pick up `demo`).
demo.launch()
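
For reference, either detector can also be queried directly, without the Gradio UI. A minimal sketch, assuming the same CLIP checkpoint as app.py and reusing the Space's bundled example image path:

from transformers import pipeline

# Same checkpoint and task as app.py; the image path comes from the example list above.
clip_detector = pipeline(model="openai/clip-vit-base-patch16",
                         task="zero-shot-image-classification")

# The pipeline accepts a PIL image, local path, or URL plus candidate labels,
# and returns a list of {"label": ..., "score": ...} dicts, highest score first.
preds = clip_detector("./baklava.jpg", candidate_labels=["baklava", "souffle", "tiramisu"])
print({p["label"]: round(p["score"], 3) for p in preds})

This list of per-label dicts is exactly what postprocess() reshapes into the {label: score} mapping that gr.Label renders.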