Meehai committed
Commit 89b2653
1 Parent(s): 92142d8

changes to symlinks to account for hf stuff
.gitattributes CHANGED
@@ -1,2 +1,6 @@
 *.jpg filter=lfs diff=lfs merge=lfs -text
 *.png filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+data/ filter=lfs diff=lfs merge=lfs -text
+
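These attribute patterns route matching files through Git LFS: *.npy/*.npz cover the numpy archives and data/ presumably covers the dataset tree. A minimal stdlib sketch of the extension rules (illustrative, not part of the commit; the data/ directory rule is omitted):

from fnmatch import fnmatch

LFS_PATTERNS = ("*.jpg", "*.png", "*.npy", "*.npz")  # extension rules from the diff above

def is_lfs_tracked(name: str) -> bool:
    """True iff a bare filename matches one of the extension patterns."""
    return any(fnmatch(name, pattern) for pattern in LFS_PATTERNS)

assert is_lfs_tracked("0.npz") and is_lfs_tracked("frame.png")
assert not is_lfs_tracked("multitask_dataset.py")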
dronescapes_reader/multitask_dataset.py CHANGED
@@ -128,13 +128,19 @@ class MultiTaskDataset(Dataset):
         all_repr_dirs: list[str] = [x.name for x in self.path.iterdir() if x.is_dir()]
         for repr_dir_name in all_repr_dirs:
             dir_name = self.path / repr_dir_name
-            items = natsorted(dir_name.glob(f"*.{self.suffix}"), key=lambda x: x.name) # important: use natsorted() here
-            in_files[repr_dir_name] = items
+            if all(f.is_dir() for f in dir_name.iterdir()): # dataset is stored as repr/part_x/0.npz, ..., part_k/n.npz
+                all_files = []
+                for part in dir_name.iterdir():
+                    all_files.extend(part.glob(f"*.{self.suffix}"))
+            else: # dataset is stored as repr/0.npz, ..., repr/n.npz
+                all_files = dir_name.glob(f"*.{self.suffix}")
+            in_files[repr_dir_name] = natsorted(all_files, key=lambda x: x.name) # important: use natsorted() here
         assert not any(len(x) == 0 for x in in_files.values()), f"{ [k for k, v in in_files.items() if len(v) == 0] }"
         return in_files
 
     def _build_dataset_drop(self) -> BuildDatasetTuple:
         in_files = self._get_all_npz_files()
+        name_to_node_path = {k: {_v.name: _v for _v in v} for k, v in in_files.items()} # {node: {name: path}}
         common = set(x.name for x in next(iter(in_files.values())))
         nodes = in_files.keys()
         for node in nodes:
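The hunk above makes file discovery accept two layouts: the original flat one (repr/0.npz, ..., repr/n.npz) and the new partitioned one (repr/part0/0.npz, ...) produced by symlinks_from_txt_list.py below. A self-contained sketch of the same logic, with a hypothetical function name:

from pathlib import Path
from natsort import natsorted  # same ordering helper the dataset class uses

def discover_files(repr_dir: Path, suffix: str = "npz") -> list[Path]:
    """Collect the data files of one representation dir, flat or partitioned."""
    if all(f.is_dir() for f in repr_dir.iterdir()):  # partitioned: repr/part0/0.npz, ...
        files = [f for part in repr_dir.iterdir() for f in part.glob(f"*.{suffix}")]
    else:  # flat: repr/0.npz, ..., repr/n.npz
        files = list(repr_dir.glob(f"*.{suffix}"))
    return natsorted(files, key=lambda x: x.name)  # numeric order, so 2.npz sorts before 10.npz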
@@ -142,11 +148,13 @@ class MultiTaskDataset(Dataset):
             assert len(common) > 0, f"Node '{node}' made the intersection null"
         common = natsorted(list(common))
         logger.info(f"Found {len(common)} data points for each node ({len(nodes)} nodes).")
-        files_per_repr = {node: [self.path / node / x for x in common] for node in nodes}
+        files_per_repr = {node: [name_to_node_path[node][x] for x in common] for node in nodes}
+        assert len(files_per_repr) > 0
         return files_per_repr, common
 
     def _build_dataset_fill_none(self) -> BuildDatasetTuple:
         in_files = self._get_all_npz_files()
+        name_to_node_path = {k: {_v.name: _v for _v in v} for k, v in in_files.items()}
         all_files = set(x.name for x in next(iter(in_files.values())))
         nodes = in_files.keys()
         for node in nodes:
@@ -158,8 +166,9 @@ class MultiTaskDataset(Dataset):
         in_file_names = {node: [f.name for f in in_files[node]] for node in nodes}
         for node in nodes:
             for file_name in all_files:
-                file_path = self.path / node / file_name if file_name in in_file_names[node] else None
+                file_path = name_to_node_path[node].get(file_name, None)
                 files_per_repr[node].append(file_path)
+        assert len(files_per_repr) > 0
         return files_per_repr, all_files
 
     def _build_dataset(self) -> BuildDatasetTuple:
 
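The two hunks above replace path reconstruction (self.path / node / x) with a per-node name-to-path table, which the partitioned layout requires: a file's location can no longer be derived from its name alone. A toy illustration with hypothetical paths:

from pathlib import Path

# one node of what _get_all_npz_files() returns
in_files = {"rgb": [Path("data/rgb/part0/1.npz"), Path("data/rgb/part1/9.npz")]}

# {node: {name: path}}, as built in _build_dataset_drop / _build_dataset_fill_none
name_to_node_path = {k: {_v.name: _v for _v in v} for k, v in in_files.items()}

assert name_to_node_path["rgb"]["9.npz"] == Path("data/rgb/part1/9.npz")  # drop: direct lookup
assert name_to_node_path["rgb"].get("2.npz") is None  # fill_none: missing name becomes None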
symlinks_from_txt_list.py CHANGED
@@ -48,6 +48,30 @@ def check_and_gather_all_files(dataset_dir: Path, txt_data: list[tuple[str, str]
     logger.info(f"Gathered {len(symlinks_to_do)} symlinks to create")
     return symlinks_to_do
 
+def make_partitions_if_needed(symlinks_to_do: dict[str, Path], partition_max_size: int) -> dict[str, Path]:
+    """updates out_dir/repr/0.npz to out_dir/repr/part0/0.npz if needed"""
+    symlinks_by_repr = {} # gather as {repr: {in_file: out_file}}
+    for k, v in symlinks_to_do.items():
+        _repr = v.parent.name
+        if _repr not in symlinks_by_repr:
+            symlinks_by_repr[_repr] = {}
+        symlinks_by_repr[_repr][k] = v
+
+    new_symlinks_to_do = {}
+    for _repr, repr_files in symlinks_by_repr.items():
+        if (count := len(repr_files)) <= partition_max_size:
+            new_symlinks_to_do = {**new_symlinks_to_do, **repr_files}
+        else:
+            logger.info(f"Representation {_repr} has {count} items, more than {partition_max_size}. Partitioning.")
+            n_parts = (count // partition_max_size) + (count % partition_max_size != 0)
+            repr_files_as_tuple = tuple(repr_files.items())
+            for i in range(n_parts):
+                part = repr_files_as_tuple[i * partition_max_size: (i + 1) * partition_max_size]
+                for in_file, out_file in part: # add the partition subdir
+                    new_symlinks_to_do[in_file] = out_file.parent / f"part{i}" / out_file.name
+    assert (a := len(new_symlinks_to_do)) == (b := len(symlinks_to_do)), (a, b)
+    return new_symlinks_to_do
+
 def read_txt_data(txt_file: Path) -> list[tuple[str, str]]:
     """reads the data from the txt file with format scene/stem"""
     f = open(txt_file, "r")
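The n_parts expression above is a ceiling division: count // partition_max_size, plus one when there is a remainder. A worked example under the default limit of 10000:

count, partition_max_size = 25_000, 10_000
n_parts = (count // partition_max_size) + (count % partition_max_size != 0)
assert n_parts == 3  # part0 and part1 hold 10000 entries each, part2 holds the last 5000

# the slice [i * partition_max_size : (i + 1) * partition_max_size] puts
# entry index 10000 at the start of part1
assert 10_000 // partition_max_size == 1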
@@ -66,6 +90,7 @@ def get_args() -> Namespace:
     parser.add_argument("--output_path", "-o", type=lambda p: Path(p).absolute())
     parser.add_argument("--overwrite", action="store_true")
     parser.add_argument("--copy_files", action="store_true")
+    parser.add_argument("--partition_max_size", type=int, default=10000) # thanks huggingface for this
     args = parser.parse_args()
     assert not args.output_path.exists() or args.overwrite, f"'{args.output_path}' exists. Use --overwrite."
     if args.output_path.exists():
@@ -80,6 +105,7 @@ def main(args: Namespace):
     args.output_path.mkdir(exist_ok=False, parents=True)
     txt_data = read_txt_data(args.txt_file)
     symlinks_to_do = check_and_gather_all_files(args.dataset_dir, txt_data, args.output_path, suffix=".npz")
+    symlinks_to_do = make_partitions_if_needed(symlinks_to_do, args.partition_max_size)
     for in_file, out_file in tqdm(symlinks_to_do.items(), desc="symlinks"):
         Path(out_file).parent.mkdir(exist_ok=True, parents=True)
         if args.copy_files:
 
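For reference, a sketch (function name hypothetical) of what the loop at the end of main() does with each (in_file, out_file) pair after partitioning; mkdir with parents=True also creates the new part{i} subdirectories:

import shutil
from pathlib import Path

def materialize(in_file: Path, out_file: Path, copy_files: bool) -> None:
    out_file.parent.mkdir(exist_ok=True, parents=True)  # also creates repr/part{i}/
    if copy_files:  # --copy_files: real copies instead of symlinks
        shutil.copyfile(in_file, out_file)
    else:  # default: cheap symlinks pointing back into the source dataset
        out_file.symlink_to(in_file)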