# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""Demo based on ModelAPI."""

import sys
from argparse import SUPPRESS, ArgumentParser
from pathlib import Path

from demo_package import AsyncExecutor, ModelWrapper, SyncExecutor, create_visualizer
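
# Example invocation (a sketch; the demo.py file name and the input video are
# assumptions, and the deployed model is expected under ../model by default):
#   python demo.py -i input_video.mp4 -it async -d CPU --output ./outputs/demo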


def build_argparser() -> ArgumentParser:
    """Return an ArgumentParser for parsing command line arguments."""
    parser = ArgumentParser(add_help=False)
    args = parser.add_argument_group("Options")
    args.add_argument(
        "-h",
        "--help",
        action="help",
        default=SUPPRESS,
        help="Show this help message and exit.",
    )
    args.add_argument(
        "-i",
        "--input",
        required=True,
        help="Required. An input to process. The input must be a single image, "
        "a folder of images, a video file, or a camera id.",
    )
    args.add_argument(
        "-m",
        "--model",
        help="Optional. Path to the directory with the trained model and configuration file. "
        "The default value points to the deployed model folder '../model'.",
        default=Path("../model"),
        type=Path,
    )
    args.add_argument(
        "-it",
        "--inference_type",
        help="Optional. Type of inference for a single model.",
        choices=["sync", "async"],
        default="async",
        type=str,
    )
    args.add_argument(
        "-l",
        "--loop",
        help="Optional. Enable reading the input in a loop.",
        default=False,
        action="store_true",
    )
    args.add_argument(
        "--no_show",
        help="Optional. Disable showing inference results on the UI.",
        default=False,
        action="store_true",
    )
    args.add_argument(
        "-d",
        "--device",
        help="Optional. Device to infer the model on.",
        choices=["CPU", "GPU"],
        default="CPU",
        type=str,
    )
    args.add_argument(
        "--output",
        default="./outputs/model_visualization",
        type=str,
        help="Optional. Output path to save the input data with predictions.",
    )
    return parser
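

# Maps the --inference_type choice to the executor class that drives inference.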
EXECUTORS = {
    "sync": SyncExecutor,
    "async": AsyncExecutor,
}


def main() -> int:
    """Main function that is used to run the demo."""
    args = build_argparser().parse_args()
    if args.loop and args.output:
        msg = "--loop and --output cannot be both specified"
        raise ValueError(msg)
    # create model
    model = ModelWrapper(args.model, device=args.device)
    inferencer = EXECUTORS[args.inference_type]
    # create visualizer
    visualizer = create_visualizer(model.task_type, model.labels, no_show=args.no_show, output=args.output)
    # create inferencer and run
    demo = inferencer(model, visualizer)
    demo.run(args.input, args.loop and not args.no_show)
    return 0


if __name__ == "__main__":
    sys.exit(main())