import gradio as gr
import torch
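
# Gradio demo: detect jar lid defects (deformation, hole, scratch) versus intact lids
# with a custom-trained YOLOv7 model. Note: the gr.inputs / gr.outputs namespaces used
# below are the legacy Gradio API; this script assumes a pre-4.0 Gradio release where
# they are still available.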

###############

def yolov7_inference(
    image=None,
    conf_threshold: float = 0.50,
):
    # Select the GPU if available, otherwise fall back to CPU.
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    # Load the custom-trained YOLOv7 weights via torch.hub and move them to the
    # selected device. Reloading on every call is simple but slow; caching the
    # model at module level would avoid the repeated initialization.
    path = 'y7-prdef.pt'
    model = torch.hub.load("WongKinYiu/yolov7", "custom", path)
    model.to(device)
    model.conf = conf_threshold

    # Run inference at size 640 and return the annotated image as a numpy array.
    results = model([image], size=640)
    return results.render()[0]

# Gradio input components: the image to analyze and a confidence-threshold slider.
inputs = [
    gr.inputs.Image(type="pil", label="Input Image"),
    gr.inputs.Slider(minimum=0.0, maximum=1.0, default=0.50, step=0.05, label="Confidence Threshold"),
]

# Assemble the Gradio interface and launch the app with request queueing enabled.
demo_app = gr.Interface(
    fn=yolov7_inference,
    inputs=inputs,
    outputs=gr.outputs.Image(type="numpy", label="Output Image"),
    title="Detection of jar lid defects (YOLOv7)",
    description="Detects jar lids that are damaged (deformation, hole, scratch) versus intact. | Ruthger Righart",
    article="<p style='text-align: center'><a href='https://www.rrighart.com' target='_blank'>Webpage</a></p> <p style='text-align: center'><a href='https://www.kaggle.com/code/rrighart/detection-of-product-defects-using-yolov7' target='_blank'>Kaggle</a></p>",
    examples=[['t1.JPG', 0.50]],
    cache_examples=True,
)
demo_app.launch(debug=False, enable_queue=True)