import gradio as gr
from transformers import DetrImageProcessor, DetrForObjectDetection
import torch

# Load the processor and the fine-tuned detector once at import time so the
# weights are not re-downloaded on every request.
processor = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50")
model = DetrForObjectDetection.from_pretrained("Guy2/AirportSec-100epoch")

def analyze(img):
    # Gradio's "image" input delivers the picture as a numpy array of shape
    # (height, width, channels).
    inputs = processor(images=img, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)

    # post_process_object_detection expects one (height, width) pair per image;
    # for a numpy array, shape[:2] is already (height, width).
    target_sizes = torch.tensor([img.shape[:2]])
    results = processor.post_process_object_detection(
        outputs, target_sizes=target_sizes, threshold=0.9
    )[0]
    # Report every detection above the threshold, not just the first one.
    detections = []
    for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
        box = [round(i, 2) for i in box.tolist()]
        detections.append(
            f"Detected {model.config.id2label[label.item()]} with confidence "
            f"{round(score.item(), 3)} at location {box}"
        )
    return "\n".join(detections) if detections else "No objects detected above the 0.9 threshold."

app = gr.Interface(fn=analyze, inputs="image", outputs="text")
app.launch()
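
# A minimal usage sketch for calling this Space from another process
# (assumptions: the app is running at Gradio's default local URL, the separate
# gradio_client package is installed, and "xray_scan.jpg" is a hypothetical
# test image; recent gradio_client versions wrap file inputs in handle_file()):
#
#   from gradio_client import Client, handle_file
#   client = Client("http://127.0.0.1:7860/")
#   print(client.predict(handle_file("xray_scan.jpg"), api_name="/predict"))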