import gradio as gr
import torch
# Run YOLOv7 inference on a single image and return the annotated result.
def yolov7_inference(
    image: str = None,
    conf_threshold: float = 0.20,
):
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    path = 'y7-prdef.pt'
    # Note: the custom model is reloaded from the hub on every request.
    model = torch.hub.load("WongKinYiu/yolov7", "custom", path).to(device)
    model.conf = conf_threshold
    results = model([image], size=640)
    return results.render()[0]  # annotated image as a numpy array
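
# A possible optimization (a sketch, not part of the original app; assumes the
# same 'y7-prdef.pt' weights file): load the custom weights once at module
# import so each request reuses the cached model instead of reloading it from
# the hub. The function above would then reference this instance:
#
#   model = torch.hub.load("WongKinYiu/yolov7", "custom", "y7-prdef.pt")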
inputs = [
    gr.Image(type="filepath", label="Input"),
    gr.Slider(minimum=0.0, maximum=1.0, value=0.2, step=0.05, label="Confidence Threshold", interactive=True),
]
outputs = [
    gr.Image(type="numpy", label="Output"),  # the function returns a rendered numpy array
]
css = ".output_image {height: 40rem !important; width: 100% !important;}"
demo_app = gr.Interface(
    fn=yolov7_inference,
    inputs=inputs,
    outputs=outputs,
    title="Fast detection of jar lid defects using YOLOv7",
    description="""
    This application detects damaged jar lids. Types of damage include deformations, holes, and scratches. The object detection notebook can be found on <a href="https://www.kaggle.com/rrighart">Kaggle</a>.
    Contact: Ruthger Righart
    Email: [email protected]
    Web: <a href="https://www.rrighart.com">www.rrighart.com</a>
    """,
    article="<p style='text-align: center'><a href='https://www.rrighart.com' target='_blank'>Webpage</a></p> <p style='text-align: center'><a href='https://www.kaggle.com/code/rrighart/detection-of-product-defects-using-yolov7' target='_blank'>Kaggle</a></p>",
    examples=[['example1.JPG', 0.50], ['example2.JPG', 0.50], ['example3.JPG', 0.50]],
    css=css,
    cache_examples=True,
)
demo_app.queue().launch(debug=False)
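
# A minimal local sanity check (a sketch; assumes the bundled example image
# 'example1.JPG' is present in the working directory). Run it instead of
# launch() to verify the inference function outside the Gradio UI:
#
#   annotated = yolov7_inference("example1.JPG", conf_threshold=0.5)
#   print(annotated.shape)  # numpy array with detection boxes drawn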