import gradio as gr
import torch

###############
def yolov7_inference(image=None, conf_threshold: float = 0.50):
    """Run YOLOv7 on a PIL image and return the annotated image as a numpy array."""
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Load the custom jar-lid-defect weights through the YOLOv7 hub entry point.
    # Note: loading inside the function means the model is reloaded on every call.
    path = "y7-prdef.pt"
    model = torch.hub.load("WongKinYiu/yolov7", "custom", path).to(device)
    model.conf = conf_threshold  # confidence threshold for reported detections
    results = model([image], size=640)
    # render() draws the detected boxes onto the image and returns a list of numpy arrays
    return results.render()[0]
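
# A minimal local sanity check of the function above, assuming the weights file
# 'y7-prdef.pt' and the example image 't1.JPG' sit next to this script; the
# output filename is illustrative. Kept commented out so it does not run when
# the Space starts:
#
# from PIL import Image
# preview = yolov7_inference(Image.open("t1.JPG"), conf_threshold=0.50)
# Image.fromarray(preview).save("t1_annotated.JPG")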
# Gradio 4.x components (the older gr.inputs / gr.outputs namespace has been removed).
inputs = [
    gr.Image(type="pil", label="Input Image"),
    gr.Slider(minimum=0.0, maximum=1.0, value=0.50, step=0.05, label="Confidence Threshold"),
]
demo_app = gr.Interface(
    fn=yolov7_inference,
    inputs=inputs,
    # The function returns a rendered numpy array, so the output type is "numpy".
    outputs=gr.Image(type="numpy", label="Output Image"),
    title="Detection of jar lid defects (YOLOv7)",
    description="App detecting jar lids that are damaged (deformation, hole, scratch) versus intact. | Ruthger Righart",
    article="<p style='text-align: center'><a href='https://www.rrighart.com' target='_blank'>Webpage</a></p> <p style='text-align: center'><a href='https://www.kaggle.com/code/rrighart/detection-of-product-defects-using-yolov7' target='_blank'>Kaggle</a></p>",
    examples=[["t1.JPG", 0.50]],
    cache_examples=True,
)
# enable_queue was removed from launch(); queuing is enabled via .queue() instead.
demo_app.queue().launch(debug=False)
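
# To try the app locally (assuming gradio, torch, and the YOLOv7 repo
# dependencies pulled in by torch.hub are installed), run `python app.py`
# and open the local URL that Gradio prints, e.g. http://127.0.0.1:7860.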