Commit fcf7625 · Parent(s): c57954b

add openvino model

Files changed:
- app.py +12 -3
- config_store.py +0 -10
app.py CHANGED
@@ -46,6 +46,8 @@ def parse_configs(inputs):
             model = value
         elif key.label == "task":
             task = value
+        elif key.label == "openvino_model":
+            openvino_label = value
         elif "." in key.label:
             backend, argument = key.label.split(".")
             configs[backend][argument] = value
@@ -65,9 +67,10 @@ def parse_configs(inputs):
         device=DEVICE,
         **configs["pytorch"],
     )
+
     configs["openvino"] = OVConfig(
         task=task,
-        model=model,
+        model=openvino_label or model,
         device=DEVICE,
         **configs["openvino"],
     )
@@ -165,12 +168,17 @@ with gr.Blocks() as demo:
 
     with gr.Column(variant="panel"):
         model = HuggingfaceHubSearch(
-            placeholder="Search for a model",
-            sumbit_on_select=True,
+            placeholder="Search for a PyTorch model",
             search_type="model",
             label="model",
         )
 
+        openvino_model = HuggingfaceHubSearch(
+            placeholder="Search for an OpenVINO model (optional)",
+            search_type="model",
+            label="openvino_model",
+        )
+
     with gr.Row():
         task = gr.Dropdown(
             info="Task to run the benchmark on.",
@@ -207,6 +215,7 @@ with gr.Blocks() as demo:
         inputs={
             task,
             model,
+            openvino_model,
             # backends,
             *process_config.values(),
             *inference_config.values(),
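Note on the parse_configs change: the function routes each Gradio input by its component label.
Plain labels ("model", "task", and now "openvino_model") are captured directly, while dotted
labels such as "openvino.use_cache" are split into per-backend config dicts. Below is a minimal,
self-contained sketch of that routing together with the new fallback, using a plain dict as a
hypothetical stand-in for the Gradio components:

    from collections import defaultdict

    def route(inputs):
        # illustrative stand-in for the label routing in parse_configs
        model, task, openvino_label = None, None, None
        configs = defaultdict(dict)
        for label, value in inputs.items():
            if label == "model":
                model = value
            elif label == "task":
                task = value
            elif label == "openvino_model":  # new in this commit
                openvino_label = value
            elif "." in label:
                backend, argument = label.split(".")
                configs[backend][argument] = value
        # the new fallback: reuse the PyTorch model ID when no OpenVINO model is given
        return model, task, openvino_label or model, configs

    model, task, ov_model, configs = route({
        "model": "gpt2", "task": "text-generation",
        "openvino_model": "", "openvino.use_cache": True,
    })
    assert ov_model == "gpt2"  # empty optional box falls back to the PyTorch ID
    assert configs["openvino"] == {"use_cache": True}

The "openvino_label or model" idiom assumes the optional search box yields a falsy value (an
empty string or None) when left blank, so the PyTorch model ID is reused unless an OpenVINO
model is explicitly selected.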
config_store.py CHANGED
@@ -73,21 +73,11 @@ def get_pytorch_config():
             choices=["bfloat16", "float16", "float32", "auto"],
             info="The dtype to use for the model",
         ),
-        "pytorch.torch_compile": gr.Checkbox(
-            value=False,
-            label="pytorch.torch_compile",
-            info="Compiles the model with torch.compile",
-        ),
     }
 
 
 def get_openvino_config():
     return {
-        "openvino.export": gr.Checkbox(
-            value=True,
-            label="openvino.export",
-            info="Exports the model to OpenVINO",
-        ),
         "openvino.use_cache": gr.Checkbox(
             value=True,
             label="openvino.use_cache",
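After this commit the OpenVINO backend no longer exposes an export checkbox in the UI. A sketch
of the trimmed getter as it reads post-diff (the hunk is truncated, so further kwargs and entries
may follow the ones shown; gradio is the only import assumed):

    import gradio as gr

    def get_openvino_config():
        # the "openvino.export" checkbox was removed by this commit, presumably
        # because a ready-made OpenVINO model can now be selected directly in the UI
        return {
            "openvino.use_cache": gr.Checkbox(
                value=True,
                label="openvino.use_cache",
                # ...truncated in the hunk above; more options may follow
            ),
        }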