"""Gathers the files listed in a txt file (one scene/stem entry per line) from a dataset directory into a flat
out_dir/<representation>/ layout, using symlinks by default or file copies with --copy_files."""
import os
import shutil
from argparse import ArgumentParser, Namespace
from pathlib import Path

from loggez import loggez_logger as logger
from tqdm import tqdm


def _check_and_find(dataset_dir: Path, output_path: Path, scene: str, _repr: str,
                    stem: str, suffix: str) -> tuple[Path, Path] | None:
    """Handles all the quirks of how data is stored on disk. VRE uses <repr>/npy/<stem>.npy, but other layouts exist too."""
    out_file = Path(f"{output_path}/{_repr}/{scene}_{stem}{suffix}")
    if (in_path := (dataset_dir / scene / _repr / f"{stem}{suffix}")).exists():
        return in_path, out_file
    if (in_path := (dataset_dir / scene / _repr / "npy" / f"{stem}{suffix}")).exists():
        return in_path, out_file

    # some layouts store zero-padded stems (e.g. 42 -> 000042), so retry with the padded variant if the stem is numeric
    try:
        int_stem = int(stem)
    except ValueError:
        return None

    if (in_path := (dataset_dir / scene / _repr / f"{int_stem:06d}{suffix}")).exists():
        return in_path, out_file
    if (in_path := (dataset_dir / scene / _repr / "npy" / f"{int_stem:06d}{suffix}")).exists():
        return in_path, out_file
    return None
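

# Lookup order sketch (scene/repr/stem names below are illustrative, not from a real dataset): for scene "scene_a",
# representation "depth", stem "42" and suffix ".npz", _check_and_find probes, in order:
#   dataset_dir/scene_a/depth/42.npz
#   dataset_dir/scene_a/depth/npy/42.npz
#   dataset_dir/scene_a/depth/000042.npz
#   dataset_dir/scene_a/depth/npy/000042.npz
# and maps the first hit to output_path/depth/scene_a_42.npz.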


def check_and_gather_all_files(dataset_dir: Path, txt_data: list[tuple[str, str]],
                               output_path: Path, suffix: str) -> dict[Path, Path]:
    """Returns a {in_dir/scene/repr/stem.suffix: out_dir/repr/scene_stem.suffix} dict based on dataset_dir."""
    assert suffix.startswith("."), suffix
    scene_reprs: dict[str, list[str]] = {}  # cache of the representations (subdirectories) of each scene
    symlinks_to_do: dict[Path, Path] = {}
    for scene, stem in tqdm(txt_data, desc="Gather data"):
        assert (dataset_dir / scene).exists(), f"Scene '{scene}' does not exist in '{dataset_dir}'"
        if scene not in scene_reprs:
            scene_reprs[scene] = [x.name for x in (dataset_dir / scene).iterdir() if x.is_dir()]
        n_found = 0
        for _repr in scene_reprs[scene]:
            if (res := _check_and_find(dataset_dir, output_path, scene, _repr, stem, suffix)) is not None:
                in_file, out_file = res
                n_found += 1
                symlinks_to_do[in_file] = out_file
        assert n_found > 0, f"Stem '{stem}' not found in any repr ({scene_reprs[scene]}) of scene '{scene}'"
    assert len(symlinks_to_do) > 0
    logger.info(f"Gathered {len(symlinks_to_do)} symlinks to create")
    return symlinks_to_do
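

# Resulting mapping sketch (hypothetical paths): with a txt entry "scene_a/42" and a "depth" representation on disk,
# the returned dict contains entries such as:
#   {Path("/data/in/scene_a/depth/npy/42.npz"): Path("/data/out/depth/scene_a_42.npz")}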


def make_partitions_if_needed(symlinks_to_do: dict[Path, Path], partition_max_size: int) -> dict[Path, Path]:
    """Updates out_dir/repr/0.npz to out_dir/repr/part<i>/0.npz for representations with too many items."""
    symlinks_by_repr: dict[str, dict[Path, Path]] = {}
    for k, v in symlinks_to_do.items():
        _repr = v.parent.name
        symlinks_by_repr.setdefault(_repr, {})[k] = v

    new_symlinks_to_do: dict[Path, Path] = {}
    for _repr, repr_files in symlinks_by_repr.items():
        if (count := len(repr_files)) <= partition_max_size:
            new_symlinks_to_do = {**new_symlinks_to_do, **repr_files}
        else:
            logger.info(f"Representation {_repr} has {count} items, more than {partition_max_size}. Partitioning.")
            n_parts = (count // partition_max_size) + (count % partition_max_size != 0)  # ceiling division
            repr_files_as_tuple = tuple(repr_files.items())
            for i in range(n_parts):
                part = repr_files_as_tuple[i * partition_max_size: (i + 1) * partition_max_size]
                for in_file, out_file in part:
                    new_symlinks_to_do[in_file] = out_file.parent / f"part{i}" / out_file.name
    assert (a := len(new_symlinks_to_do)) == (b := len(symlinks_to_do)), (a, b)
    return new_symlinks_to_do
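

# Partitioning sketch: with partition_max_size=10000 and a representation holding 25000 files,
# n_parts = 25000 // 10000 + (25000 % 10000 != 0) = 3, so the outputs are rewritten to
# out_dir/repr/part0/... (10000 files), out_dir/repr/part1/... (10000) and out_dir/repr/part2/... (5000).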


def read_txt_data(txt_file: Path) -> list[tuple[str, str]]:
    """Reads the data from the txt file with one scene/stem entry per line."""
    res: list[tuple[str, str]] = []
    with open(txt_file, "r") as f:
        for row in f.readlines():
            assert len(split_row := row.strip().split("/")) == 2, row
            res.append((split_row[0], split_row[1]))
    logger.info(f"Read {len(res)} paths.")
    return res
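

# Expected txt file format sketch (contents are illustrative), one scene/stem pair per line:
#   scene_a/42
#   scene_a/43
#   scene_b/0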


def get_args() -> Namespace:
    """CLI args."""
    parser = ArgumentParser()
    parser.add_argument("dataset_dir", type=lambda p: Path(p).absolute())
    parser.add_argument("--txt_file", type=Path, required=True)
    parser.add_argument("--output_path", "-o", type=lambda p: Path(p).absolute(), required=True)
    parser.add_argument("--overwrite", action="store_true")
    parser.add_argument("--copy_files", action="store_true")
    parser.add_argument("--partition_max_size", type=int, default=10000)
    args = parser.parse_args()
    if args.output_path.exists():
        if args.overwrite:
            logger.info(f"{args.output_path} exists and --overwrite set, deleting the directory first")
            shutil.rmtree(args.output_path)
        else:
            logger.info(f"{args.output_path} exists but --overwrite not set. Will skip all existing files")
    assert args.dataset_dir.exists() and args.dataset_dir.is_dir(), f"'{args.dataset_dir}' doesn't exist."
    assert args.txt_file.exists(), f"'{args.txt_file}' doesn't exist."
    return args
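

# Example invocation (script name and paths are illustrative):
#   python gather_dataset_subset.py /data/vre_dataset --txt_file train_split.txt -o /data/train_subset
# Add --copy_files to copy instead of symlink, and --overwrite to rebuild an existing output directory.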


def main(args: Namespace):
    """Main fn."""
    logger.info(f"\n- In dir: {args.dataset_dir}\n- Out dir: {args.output_path}\n- Symlinks: {not args.copy_files}")
    args.output_path.mkdir(exist_ok=not args.overwrite, parents=True)
    txt_data = read_txt_data(args.txt_file)
    symlinks_to_do = check_and_gather_all_files(args.dataset_dir, txt_data, args.output_path, suffix=".npz")
    symlinks_to_do = make_partitions_if_needed(symlinks_to_do, args.partition_max_size)
    for in_file, out_file in tqdm(symlinks_to_do.items(), desc="copying" if args.copy_files else "symlinks"):
        Path(out_file).parent.mkdir(exist_ok=True, parents=True)
        if Path(out_file).exists():
            continue  # --overwrite not set, so keep existing files
        if args.copy_files:
            shutil.copyfile(in_file, out_file, follow_symlinks=True)
        else:
            # make the symlink relative so the output directory can be moved around without breaking the links
            rel_path = f"{os.path.relpath(in_file.parent, out_file.parent)}/{in_file.name}"
            assert (pth := Path(f"{out_file.parent}/{rel_path}")).exists(), pth
            os.symlink(rel_path, out_file)


if __name__ == "__main__":
    main(get_args())