#!/usr/bin/env python3
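"""
Gathers a subset of a dataset into a flat output directory of symlinks (or copies).

Reads a txt file with one `scene/stem` entry per line, locates each stem under every
representation (subdirectory) of that scene, and links `in_dir/scene/repr/stem.npz` to
`out_dir/repr/scene_stem.npz`. Representations exceeding --partition_max_size files are
split into `part<i>/` subdirectories.

Example invocation (script name and paths are illustrative):
    python gather_subset.py /data/my_dataset --txt_file subset.txt -o /data/subset_out
"""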
import shutil
import os
from argparse import ArgumentParser, Namespace
from pathlib import Path
from loggez import loggez_logger as logger
from tqdm import tqdm

def _check_and_find(dataset_dir: Path, output_path: str, scene: str, _repr: str,
                    stem: str, suffix: str) -> tuple[Path, Path] | None:
    """handle all weirdnesses on how we store data. VRE uses npy/x.npy, but others have different format too."""
    out_file = Path(f"{output_path}/{_repr}/{scene}_{stem}{suffix}") # e:g. rgb/slanic_1.npz
    if (in_path := (dataset_dir / scene / _repr / f"{stem}{suffix}")).exists(): # e.g.: slanic/rgb/1.npz
        return in_path, out_file
    if (in_path := (dataset_dir / scene / _repr / "npy" / f"{stem}{suffix}")).exists(): # e.g.: slanic/rgb/npy/1.npz
        return in_path, out_file

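    # some layouts zero-pad the frame index on disk (e.g. 000001.npz), so retry with the stem as an int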
    try:
        int_stem = int(stem)
    except ValueError:
        return None

    if (in_path := (dataset_dir / scene / _repr / f"{int_stem:06d}{suffix}")).exists(): # e.g.: slanic/rgb/000001.npz
        return in_path, out_file
    # e.g.: slanic/rgb/npy/000001.npz
    if (in_path := (dataset_dir / scene / _repr / "npy" / f"{int_stem:06d}{suffix}")).exists():
        return in_path, out_file
    return None

def check_and_gather_all_files(dataset_dir: Path, txt_data: list[tuple[str, str]],
                               output_path: Path, suffix: str) -> dict[Path, Path]:
    """returns a {in_dir/scene/repr/stem.suffix: out_dir/repr/scene_stem.suffix} dict based on dataset_dir"""
    assert f"{suffix}".startswith("."), suffix
    scene_reprs = {}
    symlinks_to_do = {}
    for scene, stem in tqdm(txt_data, desc="Gather data"):
        assert (dataset_dir / scene).exists(), f"Scene '{scene}' does not exist in '{dataset_dir}'"
        if scene not in scene_reprs:
            scene_reprs[scene] = [x.name for x in (dataset_dir / scene).iterdir() if x.is_dir()]
        n_found = 0
        for _repr in scene_reprs[scene]:
            if (res := _check_and_find(dataset_dir, output_path, scene, _repr, stem, suffix)) is not None:
                in_file, out_file = res
                n_found += 1
                symlinks_to_do[in_file] = out_file
        assert n_found > 0, f"Stem '{stem}' not found in any repr ({scene_reprs[scene]}) of scene '{scene}'"
    assert len(symlinks_to_do) > 0
    logger.info(f"Gathered {len(symlinks_to_do)} symlinks to create")
    return symlinks_to_do

def make_partitions_if_needed(symlinks_to_do: dict[Path, Path], partition_max_size: int) -> dict[Path, Path]:
    """updates out_dir/repr/0.npz to out_dir/repr/part0/0.npz if needed"""
    symlinks_by_repr = {} # gather as {repr: {in_file: out_file}}
    for k, v in symlinks_to_do.items():
        _repr = v.parent.name
        if _repr not in symlinks_by_repr:
            symlinks_by_repr[_repr] = {}
        symlinks_by_repr[_repr][k] = v

    new_symlinks_to_do = {}
    for _repr, repr_files in symlinks_by_repr.items():
        if (count := len(repr_files)) <= partition_max_size:
            new_symlinks_to_do = {**new_symlinks_to_do, **repr_files}
        else:
            logger.info(f"Representation {_repr} has {count} items which > than {partition_max_size}. Partitioning.")
            n_parts = (count // partition_max_size) + (count % partition_max_size != 0)
            repr_files_as_tuple = tuple(repr_files.items())
            for i in range(n_parts):
                part = repr_files_as_tuple[i * partition_max_size: (i + 1) * partition_max_size]
                for in_file, out_file in part: # add the partition subdir
                    new_symlinks_to_do[in_file] = out_file.parent / f"part{i}" / out_file.name
    assert (a := len(new_symlinks_to_do)) == (b := len(symlinks_to_do)), (a, b)
    return new_symlinks_to_do

def read_txt_data(txt_file: Path) -> list[tuple[str, str]]:
    """reads the data from the txt file; one scene/stem entry per line (e.g. slanic/1)"""
    res = []
    with open(txt_file, "r") as f:
        for row in f:
            assert len(split_row := row.strip().split("/")) == 2, row
            res.append(tuple(split_row))
    logger.info(f"Read {len(res)} paths.")
    return res

def get_args() -> Namespace:
    """cli args"""
    parser = ArgumentParser()
    parser.add_argument("dataset_dir", type=lambda p: Path(p).absolute())
    parser.add_argument("--txt_file", type=Path)
    parser.add_argument("--output_path", "-o", type=lambda p: Path(p).absolute())
    parser.add_argument("--overwrite", action="store_true")
    parser.add_argument("--copy_files", action="store_true")
    parser.add_argument("--partition_max_size", type=int, default=10000) # thanks huggingface for this
    args = parser.parse_args()
    # validate the inputs before (potentially) deleting the output directory
    assert args.dataset_dir.exists() and args.dataset_dir.is_dir(), f"'{args.dataset_dir}' doesn't exist."
    assert args.txt_file.exists(), f"'{args.txt_file}' doesn't exist."
    if args.output_path.exists():
        if args.overwrite:
            logger.info(f"{args.output_path} exists and --overwrite set, deleting the directory first")
            shutil.rmtree(args.output_path)
        else:
            logger.info(f"{args.output_path} exists but --overwrite not set. Will skip all existing files")
    return args

def main(args: Namespace):
    """main fn"""
    logger.info(f"\n- In dir: {args.dataset_dir}\n- Out dir: {args.output_path} \n- Symlinks: {not args.copy_files}")
    args.output_path.mkdir(exist_ok=not args.overwrite, parents=True)
    txt_data = read_txt_data(args.txt_file)
    symlinks_to_do = check_and_gather_all_files(args.dataset_dir, txt_data, args.output_path, suffix=".npz")
    symlinks_to_do = make_partitions_if_needed(symlinks_to_do, args.partition_max_size)
    for in_file, out_file in tqdm(symlinks_to_do.items(), desc="copying" if args.copy_files else "symlinks"):
        out_file.parent.mkdir(exist_ok=True, parents=True)
        if out_file.exists():
            continue
        if args.copy_files:
            shutil.copyfile(in_file, out_file, follow_symlinks=True)
        else:
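            # relative symlink target, so both trees can be moved together without breaking the links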
            rel_path = f"{os.path.relpath(in_file.parent, out_file.parent)}/{in_file.name}"
            assert (pth := Path(f"{out_file.parent}/{rel_path}")).exists(), pth
            os.symlink(rel_path, out_file)

if __name__ == "__main__":
    main(get_args())