kevinconka committed
Commit: 869f4d7 · 1 Parent(s): 4725ead

added examples from own DB
Files changed:
- app.py +9 -9
- examples/22825932.jpg +0 -0
- examples/23411057.jpg +0 -0
- examples/23698288.jpg +0 -0
- examples/23719419.jpg +0 -0
app.py CHANGED
@@ -1,3 +1,4 @@
+import glob
 import gradio as gr
 from huggingface_hub import get_token
 from utils import load_model, load_image_from_url, inference, load_badges
@@ -27,7 +28,7 @@ h1 {
 """
 
 model = load_model("SEA-AI/yolov5n6-RGB", img_size=1280)
-model.conf = 0.
+model.conf = 0.2
 model.iou = 0.4
 model.max_det = 100
 model.agnostic = True  # NMS class-agnostic
@@ -60,11 +61,7 @@ with gr.Blocks(css=css) as demo:
     notice = gr.Markdown(value=NOTICE, visible=False)
 
     gr.Examples(
-        examples=[
-            "https://images.pexels.com/photos/273886/pexels-photo-273886.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
-            "https://images.pexels.com/photos/913111/pexels-photo-913111.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
-            "https://images.pexels.com/photos/88517/pexels-photo-88517.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
-        ],
+        examples=glob.glob("examples/*.jpg"),
         inputs=img_input,
         outputs=img_output,
         fn=lambda image: inference(model, image),
@@ -83,21 +80,24 @@ with gr.Blocks(css=css) as demo:
     def show_hide(img_output):
         visible = img_output is not None
         return {
-            flag: gr.Button("Flag", visible=visible),
+            flag: gr.Button("Flag", visible=visible, interactive=True),
             notice: gr.Markdown(value=NOTICE, visible=visible),
         }
 
     # This needs to be called prior to the first call to callback.flag()
     hf_writer.setup([img_input], "flagged")
-    img_input.flag
 
     # We can choose which components to flag (in this case, we'll flag all)
     flag.click(lambda: gr.Info("Thank you for contributing!")).then(
+        lambda: {flag: gr.Button("Flag", interactive=False)}, [], [flag]
+    ).then(
         lambda *args: hf_writer.flag(args),
         [img_input, flag],
         [],
         preprocess=False,
-    ).then(
+    ).then(
+        lambda: load_badges(dataset_name), [], badges
+    )
 
 if __name__ == "__main__":
     demo.queue().launch()
examples/22825932.jpg ADDED
examples/23411057.jpg ADDED
examples/23698288.jpg ADDED
examples/23719419.jpg ADDED
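
For reference, the flag-button flow this commit rewires (thank the user, grey out the Flag button so the same image is not submitted twice, persist the flagged input through hf_writer, then refresh the contribution badges) can be sketched in isolation. The snippet below is a minimal, self-contained approximation rather than the Space's actual code: the real app builds hf_writer as a Hugging Face dataset flagging callback and renders badges via utils.load_badges(dataset_name); both are stubbed here with placeholders.

# flag_flow_sketch.py -- stand-alone sketch of the flagging chain wired up in app.py.
# Stubs (DummyWriter, fake badge text) replace the Space's hf_writer and load_badges.
import gradio as gr


class DummyWriter:
    """Minimal stand-in for the Space's hf_writer flagging callback."""

    def setup(self, components, flagging_dir):
        self.components = components
        self.flagging_dir = flagging_dir

    def flag(self, data):
        print(f"flagged: {data}")


hf_writer = DummyWriter()

with gr.Blocks() as demo:
    img_input = gr.Image(label="input")
    img_output = gr.Image(label="output")
    flag = gr.Button("Flag")
    badges = gr.Markdown("badges placeholder")

    # Must run before the first call to hf_writer.flag()
    hf_writer.setup([img_input], "flagged")

    # Same chaining idea as the commit: toast, disable the button, write the flag,
    # then refresh the badges markdown.
    flag.click(lambda: gr.Info("Thank you for contributing!")).then(
        lambda: {flag: gr.Button("Flag", interactive=False)}, [], [flag]
    ).then(
        lambda *args: hf_writer.flag(args),
        [img_input, flag],
        [],
        preprocess=False,
    ).then(
        lambda: "badges refreshed (load_badges(dataset_name) in the real app)", [], badges
    )

if __name__ == "__main__":
    demo.queue().launch()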