import gradio as gr
from ultralytics import YOLO
from ultralytics.solutions import ai_gym
import cv2
def process(video_path, pose_type):
    # Load the YOLOv8 pose-estimation model
    model = YOLO("yolov8n-pose.pt")

    cap = cv2.VideoCapture(video_path)
    assert cap.isOpened(), "Error reading video file"

    # Source video properties: width, height and frame rate
    w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH,
                                           cv2.CAP_PROP_FRAME_HEIGHT,
                                           cv2.CAP_PROP_FPS))

    video_writer = cv2.VideoWriter("output_video.mp4",
                                   cv2.VideoWriter_fourcc(*'mp4v'),
                                   fps,
                                   (w, h))

    # Init the AI Gym module for repetition counting
    gym_object = ai_gym.AIGym()
    gym_object.set_args(line_thickness=2,
                        view_img=False,  # do not display the video in real time
                        pose_type=pose_type,
                        kpts_to_check=[6, 8, 10])  # shoulder, elbow and wrist keypoints

    frame_count = 0
    while cap.isOpened():
        success, im0 = cap.read()
        if not success:
            print("Video processing has been successfully completed.")
            break
        frame_count += 1
        results = model.track(im0, verbose=True)  # tracking is recommended for counting
        im0 = gym_object.start_counting(im0, results, frame_count)
        video_writer.write(im0)

    cap.release()
    video_writer.release()
    cv2.destroyAllWindows()
    return "output_video.mp4"

title = "Workout - Monitoring"
description = "This Space counts the number of exercise in a video."
inputs = [
    gr.Video(label='Input Video'),
    gr.Radio(["pullup", "pushup", "abworkout"], label="Pose Type")
]
outputs = gr.Video(label='Output Video')
# example_list = [['Examples/PULL-UPS.mp4'],['Examples/PUSH-UPS.mp4']]
# example_list = ['Examples/PULL-UPS.mp4']
# Create the Gradio demo
demo = gr.Interface(fn=process,
                    inputs=inputs,
                    outputs=outputs,
                    title=title,
                    description=description
                    # examples=example_list,
                    # cache_examples=True
                    )
# Launch the demo!
demo.launch(show_api=True)