import os
import json
import cv2
import numpy as np
from datasets import Dataset, DatasetDict, Features, Value, Sequence, Array4D
# Define constants
VIDEO_EXTENSIONS = ['.avi']
JSON_EXTENSIONS = ['.json']
KEYPOINTS = [
"nose", "left_eye", "right_eye", "left_ear", "right_ear", "left_shoulder", "right_shoulder",
"left_elbow", "right_elbow", "left_wrist", "right_wrist", "left_hip", "right_hip",
"left_knee", "right_knee", "left_ankle", "right_ankle"
]
def load_video(video_path):
"""Reads a video file and returns a list of frames as NumPy arrays."""
cap = cv2.VideoCapture(video_path)
frames = []
while cap.isOpened():
ret, frame = cap.read()
if not ret:
break
frames.append(frame)
cap.release()
return np.array(frames)
def load_json(json_path):
"""Loads the JSON keypoint data for each frame."""
with open(json_path, 'r') as f:
data = json.load(f)
return data
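# A sketch of the per-frame JSON structure this loader assumes; the field
# names are inferred from the parsing code below, not from a published spec:
#
# [
#   {
#     "frame_index": 0,
#     "detections": [
#       {
#         "confidence": 0.97,
#         "box": {"x1": 10.0, "y1": 20.0, "x2": 110.0, "y2": 220.0},
#         "keypoints": [
#           {"label": "nose", "coordinates": [64.0, 32.0]},
#           ...
#         ]
#       }
#     ]
#   },
#   ...
# ]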
def process_frame_data(frame_data):
    """Converts one frame's detections into a structured format."""
    detections = []
    # Each frame entry is expected to carry a 'detections' list
    if 'detections' in frame_data:
        for person_id, detection in enumerate(frame_data['detections']):
            if detection:  # Skip empty detection entries
                keypoints = {
                    keypoint['label']: keypoint['coordinates']
                    for keypoint in detection.get('keypoints', [])
                }
                box = detection.get("box", {})
                person = {
                    "person_id": person_id,
                    "confidence": detection.get("confidence", 0.0),
                    # Fill missing box/keypoint fields with zeros so every
                    # record matches the dataset features defined in main()
                    "box": {k: float(box.get(k, 0.0)) for k in ('x1', 'y1', 'x2', 'y2')},
                    "keypoints": {label: keypoints.get(label, [0.0, 0.0]) for label in KEYPOINTS},
                }
                detections.append(person)
            else:
                print(f"Warning: empty detection in frame {frame_data.get('frame_index', '?')}")
    else:
        print(f"Warning: 'detections' key missing in frame data: {frame_data}")
    return detections
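# Hypothetical example of the transformation above, assuming the JSON layout
# sketched near load_json():
#
#   frame = {"frame_index": 0, "detections": [{
#       "confidence": 0.9,
#       "box": {"x1": 0, "y1": 0, "x2": 50, "y2": 100},
#       "keypoints": [{"label": "nose", "coordinates": [25.0, 10.0]}]}]}
#   process_frame_data(frame)
#   # -> [{"person_id": 0, "confidence": 0.9,
#   #      "box": {"x1": 0.0, "y1": 0.0, "x2": 50.0, "y2": 100.0},
#   #      "keypoints": {"nose": [25.0, 10.0], "left_eye": [0.0, 0.0], ...}}]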
def get_file_paths(base_path, split="train"):
"""Returns video and JSON file paths."""
video_paths = []
json_paths = []
split_path = os.path.join(base_path, split)
for label in ['Fight', 'NonFight']:
label_path = os.path.join(split_path, label)
        for video_folder in os.listdir(label_path):
            video_folder_path = os.path.join(label_path, video_folder)
            # Skip stray files; each entry is expected to be a clip folder
            if not os.path.isdir(video_folder_path):
                continue
video_file = next((f for f in os.listdir(video_folder_path) if any(f.endswith(ext) for ext in VIDEO_EXTENSIONS)), None)
json_file = next((f for f in os.listdir(video_folder_path) if any(f.endswith(ext) for ext in JSON_EXTENSIONS)), None)
if video_file and json_file:
video_paths.append(os.path.join(video_folder_path, video_file))
json_paths.append(os.path.join(video_folder_path, json_file))
return video_paths, json_paths
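# Assumed on-disk layout (inferred from get_file_paths; one .avi plus one
# .json of keypoints per clip folder):
#
#   <base_path>/
#     train/
#       Fight/
#         <clip_id>/
#           <clip>.avi
#           <clip>.json
#       NonFight/
#         ...
#     val/
#       ...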
def load_data(base_path, split="train"):
"""Loads and processes the data for a given split (train or val)."""
video_paths, json_paths = get_file_paths(base_path, split)
dataset = []
for video_path, json_path in zip(video_paths, json_paths):
# Load video frames
frames = load_video(video_path)
# Load JSON keypoints
keypoints_data = load_json(json_path)
# Process the data
frame_data = [process_frame_data(frame) for frame in keypoints_data]
# Construct the data record
dataset.append({
'video': frames,
'keypoints': frame_data,
'video_path': video_path,
'json_path': json_path
})
return dataset
def main():
# Path to the dataset directory
dataset_dir = '.' # Replace with your actual dataset path
# Load training and validation data
train_data = load_data(dataset_dir, split="train")
val_data = load_data(dataset_dir, split="val")
    # Convert to a Hugging Face Dataset.
    # Note: `datasets` ArrayXD types only allow the first dimension to be
    # dynamic (None), so the frame height/width below (240x320) are an
    # assumption -- adjust them to match your clips.
    train_features = Features({
        'video': Array4D(dtype='uint8', shape=(None, 240, 320, 3)),  # frames x H x W x BGR
        # Nested lists: outer = frames, inner = detections per frame
        'keypoints': [[{
            'person_id': Value('int32'),
            'confidence': Value('float32'),
            'box': {
                'x1': Value('float32'),
                'y1': Value('float32'),
                'x2': Value('float32'),
                'y2': Value('float32')
            },
            # Each keypoint is an (x, y) coordinate pair
            'keypoints': {key: Sequence(Value('float32'), length=2) for key in KEYPOINTS}
        }]],
        'video_path': Value('string'),
        'json_path': Value('string')
    })
    # Create DatasetDict; the records are lists of dicts, so use from_list
    dataset_dict = DatasetDict({
        'train': Dataset.from_list(train_data, features=train_features),
        'val': Dataset.from_list(val_data, features=train_features)
    })
# Save or push dataset to Hugging Face
dataset_dict.save_to_disk("keypoints_keyger")
# Or to upload: dataset_dict.push_to_hub("your_dataset_name")
if __name__ == "__main__":
main()
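# A minimal usage sketch once the script has run; "keypoints_keyger" is the
# save_to_disk target used in main():
#
#   from datasets import load_from_disk
#   ds = load_from_disk("keypoints_keyger")
#   print(ds["train"][0]["video_path"])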