LuxExistentia committed
Commit: a5ca5f7
1 Parent(s): 8e8ac18
Update app.py
app.py
CHANGED
@@ -2,6 +2,7 @@ from custom_torch_module.deploy_utils import Onnx_deploy_model
 import gradio as gr
 import time
 from PIL import Image
+import os
 
 model_path = "deploying model/" + "vit_xsmall_patch16_clip_224(trainble_0.15) (Acc 98.44%, Loss 0.168152).onnx"
 input_size = [1, 3, 224, 224]
@@ -23,12 +24,14 @@ def predict(img):
     return pred_label_and_probs, prediction_fps
 
 onnx_model = Onnx_deploy_model(model_path=model_path, img_size=img_size)
+example_list = [["examples/" + example] for example in os.listdir("examples")]
 
 # Create the Gradio demo
 demo = gr.Interface(fn=predict,
                     inputs=gr.Image(type="pil"),
                     outputs=[gr.Label(num_top_classes=2, label="Predictions"),
                              gr.Number(label="Prediction speed(FPS)")],
+                    examples=example_list,
                     title=title,
                     description=description,
                     article=article)
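For context, a quick sketch of what the added example_list evaluates to at runtime, assuming a hypothetical examples/ directory containing files named cat.jpg and dog.jpg (the filenames are illustrative, not taken from the repo):

    import os

    # os.listdir returns bare filenames, so each entry is prefixed with "examples/"
    # and wrapped in a single-element list, the per-example input format
    # gr.Interface expects when the demo has one input component.
    example_list = [["examples/" + example] for example in os.listdir("examples")]
    # e.g. [["examples/cat.jpg"], ["examples/dog.jpg"]]

Passing this list via examples= makes Gradio show the images in the examples/ folder as clickable sample inputs under the demo.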