versae committed on
Commit 81dd00f
1 Parent(s): 8981afe

Create new file

Files changed (1)
  1. convert_to_mp3_and_shard.py +136 -0
convert_to_mp3_and_shard.py ADDED
@@ -0,0 +1,136 @@
import json
import tarfile
import time
from contextlib import closing
from io import BytesIO
from pathlib import Path

from pydub import AudioSegment
from tqdm import tqdm


# (mic position, channel index) pairs; channel index N maps to the "*-N.wav" file of a recording
MIC_TYPES = (("close", 1), ("distant", 2))

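# convert_to_mp3: for every per-speaker JSON file in metadata_dir, load the close
# (-1.wav) and distant (-2.wav) channel of each recording listed under
# "val_recordings" from audio_dir/<pid>/, export both as MP3 under
# output_dir/<split>/{close,distant}/, and append one flattened metadata record per
# kept recording to <prefix><split>.jsonl; missing audio files are logged to
# <prefix><split>.log instead.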
def convert_to_mp3(split, audio_dir, metadata_dir, output_dir, prefix=None):
    # Normalize prefix to "" or a string ending in "_"
    if prefix and not prefix.endswith("_"):
        prefix += "_"
    elif not prefix:
        prefix = ""
    paths = Path(metadata_dir).glob("*.json")
    audio_dir = Path(audio_dir)
    output_dir = Path(output_dir)
    # Parenthesized multi-item "with" statements need Python 3.10+
    with (open(f"{prefix}{split}.jsonl", "w") as metadata_jsonlines,
          open(f"{prefix}{split}.log", "w") as metadata_log):
        for path in tqdm(paths):
            metadata = json.load(path.open())
            for recording in tqdm(metadata.get("val_recordings", [])):
                mic_segments = [None, None]
                # Flatten speaker, session, system, and recording info into one record
                metadata_jsonl = {
                    "pid": metadata["pid"],
                    **metadata["info"],
                    **metadata["session"],
                    **metadata["system"],
                    **recording
                }
                for mic_position, mic_index in MIC_TYPES:
                    audio_filename = audio_dir / metadata["pid"] / f'{metadata["pid"]}_{recording["file"].replace(".wav", f"-{mic_index}.wav")}'
                    try:
                        segment = AudioSegment.from_file(audio_filename.as_posix())
                        mic_segments[mic_index - 1] = segment
                    except FileNotFoundError:
                        message = f"File {audio_filename.as_posix()} ({mic_position}) not found for split {split}."
                        print(message)
                        metadata_log.write(message + "\n")
                # Keep the recording only if both microphone channels were found
                if all(mic_segments):
                    metadata_jsonlines.write(json.dumps(metadata_jsonl) + "\n")
                    for idx, segment in enumerate(mic_segments):
                        mic_position = MIC_TYPES[idx][0]
                        export_filename = output_dir / split / mic_position / f"{metadata_jsonl['pid']}_{metadata_jsonl['file'].replace('.wav', '.mp3')}"
                        export_filename.parent.mkdir(parents=True, exist_ok=True)
                        segment.export(export_filename.as_posix(), format="mp3")


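# For reference, a rough sketch of the metadata JSON shape convert_to_mp3 expects
# (only the keys read above are shown; everything else in "info", "session",
# "system" and the recording entries is passed through into the .jsonl records):
#
#   {
#     "pid": "<speaker id>",
#     "info": {...},
#     "session": {...},
#     "system": {...},
#     "val_recordings": [{"file": "<recording>.wav", ...}, ...]
#   }
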
def human_size(bytes, units=[' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']):
    """Return a human-readable string representation of a byte count."""
    return str(bytes) + units[0] if bytes < 1024 else human_size(bytes >> 10, units[1:])


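# create_shards: re-reads <prefix><split>.jsonl and, separately for each microphone
# position, packs the exported MP3s into .tar.gz shards of roughly `size` bytes.
# Each shard also carries a "metadata.json" member with the matching JSON-lines
# records, mirrored in a plain .json sidecar next to the shard; once the total
# number of shards is known, everything is renamed to the "<name>-NNNN-of-MMMM" pattern.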
def create_shards(split, mp3_dir, shard_dir, size, prefix=None):
    # Normalize prefix to "" or a string ending in "_"
    if prefix and not prefix.endswith("_"):
        prefix += "_"
    elif not prefix:
        prefix = ""
    mp3_dir = Path(mp3_dir)
    shard_dir = Path(shard_dir)
    for mic_position, _ in MIC_TYPES:
        with open(f"{prefix}{split}.jsonl", "r") as metadata_jsonlines_file:
            metadata_jsonlines = iter(metadata_jsonlines_file)
            shard_count = 0
            shard_jsonlines = []
            shard_size = 0
            metadata = True
            while metadata:
                shard_name = f"{prefix}{split}_{mic_position}-{(shard_count + 1):04d}.tar.gz"
                print(f"Shard {(shard_dir / split / shard_name).as_posix()}")
                (shard_dir / split).mkdir(parents=True, exist_ok=True)
                with tarfile.open(shard_dir / split / shard_name, "w:gz") as shard:
                    pbar = tqdm()
                    # Fill the shard until it reaches the requested size or the metadata runs out
                    while shard_size < size:
                        try:
                            metadata = json.loads(next(metadata_jsonlines))
                        except StopIteration:
                            metadata = None
                            shard_size = size
                        if metadata:
                            audio_file = mp3_dir / split / mic_position / f"{metadata['pid']}_{metadata['file'].replace('.wav', '.mp3')}"
                            # Store only the basename inside the archive
                            shard.add(audio_file.as_posix(), arcname=audio_file.name)
                            shard_size += audio_file.stat().st_size
                            shard_jsonlines.append(json.dumps(metadata))
                            pbar.set_description(f"Processing {audio_file.as_posix()} [{human_size(shard_size)}]")
                            pbar.update(1)
                    if shard_jsonlines:
                        pbar.set_description(f"Processing metadata [{len(shard_jsonlines)} entries]")
                        pbar.update(1)
                        shard_jsonlines_str = "\n".join(shard_jsonlines)
                        # Add the JSON-lines metadata as a "metadata.json" member of the shard
                        with closing(BytesIO(shard_jsonlines_str.encode("utf-8"))) as metadata_file:
                            tarinfo = tarfile.TarInfo("metadata.json")
                            tarinfo.size = len(metadata_file.getvalue())
                            tarinfo.mtime = time.time()
                            shard.addfile(tarinfo, fileobj=metadata_file)
                        # Mirror the same metadata in a sidecar .json next to the shard
                        with open(shard_dir / split / shard_name.replace(".tar.gz", ".json"), "w") as metadata_file:
                            metadata_file.write(shard_jsonlines_str)
                if shard_jsonlines:
                    shard_size = 0
                    shard_jsonlines = []
                    shard_count += 1
                else:
                    # The metadata ran out exactly on a shard boundary: drop the empty
                    # trailing archive, which has no .json sidecar and would otherwise
                    # break the renaming pass below
                    (shard_dir / split / shard_name).unlink()
        print("Renaming...")
        # Rename shards and sidecars to the "-NNNN-of-MMMM" pattern now that the count is known
        for shard_index in range(shard_count):
            shard_name = f"{prefix}{split}_{mic_position}-{(shard_index + 1):04d}.tar.gz"
            (shard_dir / split / shard_name).rename(shard_dir / split / shard_name.replace(".tar.gz", f"-of-{shard_count:04d}.tar.gz"))
            shard_json = f"{prefix}{split}_{mic_position}-{(shard_index + 1):04d}.json"
            (shard_dir / split / shard_json).rename(shard_dir / split / shard_json.replace(".json", f"-of-{shard_count:04d}.json"))


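# Rough sanity-check sketch for a finished shard (the shard name below is made up;
# point it at an actual file): every archive holds the MP3s plus a "metadata.json"
# member with one JSON object per line, so it can be read back like this:
#
#   with tarfile.open("dataset_shards/train/nst_no_train_close-0001-of-0002.tar.gz") as tar:
#       lines = tar.extractfile("metadata.json").read().decode("utf-8").splitlines()
#       records = [json.loads(line) for line in lines]
#       mp3s = [m.name for m in tar.getmembers() if m.name.endswith(".mp3")]
#       assert len(records) == len(mp3s)
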
def main():
    audio_dir = "audio/no"
    mp3_dir = "dataset_mp3"
    shard_dir = "dataset_shards"
    split_paths = {
        "train": "metadata/ADB_NOR_0463",
        "test": "metadata/ADB_NOR_0464",
    }
    prefix_dataset = "dataset"
    for split, metadata_dir in split_paths.items():
        # Keyword arguments keep the directories straight: JSON metadata comes from
        # metadata_dir, WAV audio from audio_dir
        convert_to_mp3(split, audio_dir=audio_dir, metadata_dir=metadata_dir,
                       output_dir=mp3_dir, prefix=prefix_dataset)

    prefix = "nst_no"
    print(f"""Dedup leaked files (run in a shell):
    $ comm -23 <(sort {prefix_dataset}_train.jsonl) <(sort {prefix_dataset}_test.jsonl) | shuf > {prefix}_train.jsonl
    $ shuf {prefix_dataset}_test.jsonl -o {prefix}_test.jsonl
    """)
    input("Press [Enter] when dedup is done.")
    shard_size = 500 * 1024 * 1024  # ~500 MB per shard
    for split in split_paths:
        create_shards(split, mp3_dir, shard_dir, shard_size, prefix=prefix)

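# Entry point, assuming the script is meant to be run directly
if __name__ == "__main__":
    main()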