import gradio as gr
from huggingface_hub import get_token
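# local helper modules: model/image utilities and the dataset-saver flagging callback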
from utils import load_model, load_image_from_url, inference, load_badges
from flagging import myHuggingFaceDatasetSaver
TITLE = """
RGB Detection Demo
Give it a try! Upload an image or enter a URL to an image and click
Submit
.
"""
NOTICE = """
See something off? Your feedback makes a difference! Let us know by
flagging any outcomes that don't seem right. Just click on `Flag`
to submit the image for review. Note that by clicking `Flag`, you
agree to the use of your image for A.I. improvement purposes.
"""
css = """
h1 {
text-align: center;
display: block;
}
"""
model = load_model("SEA-AI/yolov5n6-RGB", img_size=1280)
model.conf = 0.25  # NMS confidence threshold
model.iou = 0.4  # NMS IoU threshold
model.max_det = 100  # maximum number of detections per image
model.agnostic = True  # NMS class-agnostic
# This callback will be used to flag images
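# Flagged images are pushed to this Hub dataset; get_token() picks up the token
# stored by `huggingface-cli login` (or the HF_TOKEN environment variable)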
dataset_name = "SEA-AI/crowdsourced-sea-images"
hf_writer = myHuggingFaceDatasetSaver(get_token(), dataset_name)
with gr.Blocks(css=css) as demo:
    badges = gr.HTML(load_badges(dataset_name, trials=1))
    title = gr.HTML(TITLE)

    with gr.Row():
        with gr.Column():
            img_input = gr.Image(label="input", interactive=True)
            img_url = gr.Textbox(
                lines=1,
                placeholder="or enter URL to image here",
                label="input_url",
                show_label=False,
            )
            with gr.Row():
                clear = gr.ClearButton()
                submit = gr.Button("Submit", variant="primary")
        with gr.Column():
            img_output = gr.Image(
                label="output", interactive=False, show_share_button=True
            )
            flag = gr.Button("Flag", visible=False)
            notice = gr.Markdown(value=NOTICE, visible=False)
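    # with cache_examples=True the example outputs are cached, so clicking an
    # example shows the stored result instead of re-running inference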
    gr.Examples(
        examples=[
            "https://images.pexels.com/photos/273886/pexels-photo-273886.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
            "https://images.pexels.com/photos/913111/pexels-photo-913111.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
            "https://images.pexels.com/photos/88517/pexels-photo-88517.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
        ],
        inputs=img_input,
        outputs=img_output,
        fn=lambda image: inference(model, image),
        cache_examples=True,
    )
    # add components to clear
    clear.add([img_input, img_url, img_output])

    # event listeners
    img_url.change(load_image_from_url, [img_url], img_input)
    submit.click(lambda image: inference(model, image), [img_input], img_output)
    # event listeners with decorators
    @img_output.change(inputs=[img_output], outputs=[flag, notice])
    def show_hide(img_output):
        # reveal the Flag button and notice only when there is an output image;
        # returning a dict keyed by component updates each component's properties
        visible = img_output is not None
        return {
            flag: gr.Button("Flag", visible=visible),
            notice: gr.Markdown(value=NOTICE, visible=visible),
        }
    # This needs to be called prior to the first call to hf_writer.flag()
    hf_writer.setup([img_input], "flagged")
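    # The chained .then() calls run sequentially: show a thank-you toast,
    # push the flagged image to the dataset, then refresh the badges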
    # We can choose which components to flag; here, just the input image
    # (the component registered in hf_writer.setup above)
    flag.click(lambda: gr.Info("Thank you for contributing!")).then(
        lambda *args: hf_writer.flag(args),
        [img_input],
        [],
        preprocess=False,
    ).then(lambda: load_badges(dataset_name), [], badges)
if __name__ == "__main__":
    demo.queue().launch()