Commit · 585407e
1 Parent(s): ac783cd
Refactor inference function in app.py to accept image file path, update output to include file path, and adjust input type in the UI.
app.py CHANGED
@@ -20,7 +20,7 @@ from utils import (
 from flagging import HuggingFaceDatasetSaver
 
 import install_private_repos  # noqa: F401
-from seavision import load_model
+from seavision import load_model, imread
 
 
 TITLE = """
@@ -53,10 +53,11 @@ model.hor_conf_thresh = 0.1
 
 
 # @spaces.GPU
-def inference(image):
+def inference(image_fp):
     """Run inference on image and return annotated image."""
+    image = imread(image_fp)
     results = model(image)
-    return results.draw(image)
+    return results.draw(image), image_fp
 
 
 def flag_img_input(
@@ -64,7 +65,7 @@ def flag_img_input(
 ):
     """Wrapper for flagging"""
     print(f"{image=}, {flag_option=}, {username=}")
-
+    hf_writer.flag([image], flag_option=flag_option, username=username)
 
 
 # Flagging
@@ -84,7 +85,7 @@ with gr.Blocks(theme=theme, css=css, title="SEA.AI Vision Demo") as demo:
                 label="input",
                 interactive=True,
                 sources=["upload", "clipboard"],
-                type="
+                type="filepath",
             )
             img_url = gr.Textbox(
                 lines=1,
@@ -97,13 +98,14 @@ with gr.Blocks(theme=theme, css=css, title="SEA.AI Vision Demo") as demo:
             submit = gr.Button("Submit", variant="primary")
         with gr.Column():
             img_output = gr.Image(label="output", interactive=False)
+            img_filepath = gr.Textbox(label="image_fp", visible=False)
             flag = gr.Button(FLAG_TXT, visible=False)
             notice = gr.Markdown(value=NOTICE, visible=False)
 
     examples = gr.Examples(
         examples=glob.glob("examples/*.jpg"),
         inputs=img_input,
-        outputs=img_output,
+        outputs=[img_output, img_filepath],
         fn=inference,
         cache_examples=True,
     )
@@ -116,7 +118,7 @@ with gr.Blocks(theme=theme, css=css, title="SEA.AI Vision Demo") as demo:
     submit.click(check_image, [img_input], None, show_api=False).success(
         inference,
         [img_input],
-        img_output,
+        [img_output, img_filepath],
         api_name="inference",
     )
 
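After this commit, the "inference" endpoint returns two values (the annotated image and the submitted image's file path) instead of a single image, so API consumers must unpack both. Below is a minimal sketch of calling the updated endpoint with gradio_client; the Space id and the example file name are placeholders, not taken from this repo, and the exact call style may vary with the gradio_client version.

# Minimal sketch; Space id and image path are hypothetical placeholders.
# Requires gradio_client >= 1.0 for handle_file (older versions accept a plain path).
from gradio_client import Client, handle_file

client = Client("SEA-AI/vision-demo")  # placeholder Space id

# app.py exposes the endpoint as api_name="inference"; it now has two outputs:
# the annotated image and the path of the submitted image.
annotated, image_fp = client.predict(
    handle_file("examples/boat.jpg"),  # hypothetical local test image
    api_name="/inference",
)
print(annotated, image_fp)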