blumenstiel committed on
Commit
ae1b1f2
·
verified ·
1 Parent(s): 0393e96

Update terramesh.py

Browse files
Files changed (1) hide show
  1. terramesh.py +89 -47
terramesh.py CHANGED
@@ -81,10 +81,13 @@ def build_terramesh_dataset(
81
  transform: Callable = None,
82
  batch_size: int = 8,
83
  return_metadata: bool = False,
84
- shuffle: bool = True,
85
  shardshuffle: int = 100,
86
  deterministic: bool = False,
87
  seed: int = None,
 
 
 
88
  **kwargs,
89
  ):
90
  """
@@ -98,17 +101,26 @@ def build_terramesh_dataset(
98
  It requires batch_size=None in the data loader constructor.
99
  :param transform: Transform function to apply to the data, use MultimodalTransforms.
100
  :param return_metadata: Load center coordinates, timestamp (ns as int) and cloud mask (if available).
101
- :param shuffle: Shuffle samples and shards. Default to True.
102
  :param shardshuffle: The number of shards to shuffle, or None. Defaults to 100.
103
  :param deterministic: Whether to use deterministic shuffling. Defaults to False.
104
  :param seed: Random seed for shuffling. Defaults to None which uses random seeds.
105
  :param kwargs: Optional keyword arguments for single-modality which are passed to WebDataset constructor.
 
 
 
 
106
  :return: WebDataset (single modality) or DataPipeline (multiple modalities)
107
  """
108
  if len(modalities) == 1:
109
  # Single modality
110
  modalities = modalities[0]
111
 
 
 
 
 
 
112
  if isinstance(modalities, str):
113
  # Build standard WebDataset for single modality
114
  dataset = build_wds_dataset(
@@ -119,10 +131,11 @@ def build_terramesh_dataset(
119
  batch_size=batch_size,
120
  transform=transform,
121
  return_metadata=return_metadata,
122
- shuffle=shuffle,
123
  shardshuffle=shardshuffle,
124
  deterministic=deterministic,
125
  seed=seed,
 
 
126
  **kwargs
127
  )
128
  return dataset
@@ -140,10 +153,12 @@ def build_terramesh_dataset(
140
  batch_size=batch_size,
141
  transform=transform,
142
  return_metadata=return_metadata,
143
- shuffle=shuffle,
144
  shardshuffle=shardshuffle,
145
  deterministic=deterministic,
146
  seed=seed,
 
 
 
147
  )
148
  return dataset
149
 
@@ -193,14 +208,16 @@ def drop_time_dim(value, dim: int = 0):
193
  """
194
  Remove time dimension from data tensors.
195
  """
196
- if isinstance(value, np.ndarray) or isinstance(value, torch.Tensor):
197
  return value.squeeze(dim)
198
 
199
  elif isinstance(value, dict):
200
  for k, v in value.items():
201
- if isinstance(v, np.ndarray) or isinstance(v, torch.Tensor):
202
  value[k] = v.squeeze(dim)
203
  return value
 
 
204
 
205
 
206
  def build_wds_dataset(
@@ -210,12 +227,13 @@ def build_wds_dataset(
210
  urls: str | None = None,
211
  batch_size: int = 8,
212
  transform: Callable = None,
213
- shuffle: bool = True,
214
  return_metadata: bool = False,
215
  shardshuffle: int = 100,
216
  deterministic: bool = False,
217
  seed: int = None,
218
  empty_check: bool = False,
 
 
219
  *args, **kwargs
220
  ):
221
  if urls is None:
@@ -240,7 +258,7 @@ def build_wds_dataset(
240
  dataset = wds.WebDataset(
241
  urls,
242
  *args,
243
- shardshuffle=shardshuffle * shuffle, # Shuffle shard
244
  detshuffle=deterministic,
245
  seed=seed,
246
  handler=warn_and_continue,
@@ -254,21 +272,41 @@ def build_wds_dataset(
254
  dataset = dataset.map(zarr_metadata_decoder) if return_metadata else dataset.decode(zarr_decoder)
255
 
256
  # Rename modality to "image" and remove temporal dimension
257
- dataset = (dataset
258
- .rename(image="zarr.zip")
259
- .map(drop_time_dim)
260
- )
261
 
262
  if transform is not None:
263
  dataset = dataset.map(transform)
264
 
265
  # Create batches
266
  if batch_size is not None:
267
- dataset = dataset.batched(batch_size)
268
 
269
  return dataset
270
 
271
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
272
  def build_multimodal_dataset(
273
  path: str = "https://huggingface.co/datasets/ibm-esa-geospatial/TerraMesh/resolve/main/",
274
  modalities: list = None,
@@ -276,12 +314,14 @@ def build_multimodal_dataset(
276
  urls: str | None = None,
277
  batch_size: int = 8,
278
  transform: Callable = None,
279
- shuffle: bool = True,
280
  return_metadata: bool = False,
281
  shardshuffle: int = 100,
282
  deterministic: bool = False,
283
  seed: int = None,
284
  empty_check: bool = False,
 
 
 
285
  ):
286
  if modalities is None:
287
  modalities = ["S2L2A", "S2L1C", "S2RGB", "S1GRD", "S1RTC", "DEM", "NDVI", "LULC"] # Default
@@ -294,44 +334,46 @@ def build_multimodal_dataset(
294
  lst.remove(value)
295
  return lst
296
 
297
- majortom_mod = f"[{','.join(filter_list(modalities, 'S1GRD'))}]"
298
- ssl4eos12_mod = f"[{','.join(filter_list(modalities, 'S1RTC'))}]"
 
 
 
 
 
 
 
299
 
300
- # Joins majortom and ssl4eos12 shard files with "::"
301
- urls = (os.path.join(path, split, majortom_mod, split_files["majortom"][split][0])
302
- + "::" + os.path.join(path, split, ssl4eos12_mod, split_files["ssl4eos12"][split][0]))
 
 
 
 
 
 
 
 
 
 
 
303
 
304
- dataset = wds.DataPipeline(
305
- # Infinitely sample shards from the shard list with replacement. Each worker is seeded independently.
306
- (
307
- wds.ResampledShards(urls, deterministic=deterministic, seed=seed, empty_check=empty_check)
308
- if shuffle else wds.SimpleShardList(urls)
309
- ),
310
- wds.split_by_node,
311
- wds.split_by_worker,
312
- multi_tarfile_samples, # Extract individual samples from multi-modal tar files
313
- wds.shuffle(shardshuffle, seed=seed), # Shuffle with a buffer of given size
314
- (
315
- wds.map(zarr_metadata_decoder)
316
- if return_metadata
317
- else wds.decode(zarr_decoder) # Decode from bytes to numpy arrays, etc.
318
- ),
319
- wds.map(drop_time_dim), # Remove time dimension from tensors
320
- wds.map(remove_extensions), # Remove "file extensions" from dictionary keys
321
- ( # Apply transformation
322
- wds.map(transform)
323
- if transform is not None
324
- else wds.map(identity)
325
- ),
326
- ( # Batching
327
- wds.batched(batch_size, collation_fn=default_collate, partial=False)
328
- if batch_size is not None
329
- else wds.map(identity)
330
- ),
331
- )
332
  return dataset
333
 
334
 
 
 
 
 
 
 
 
 
 
 
 
 
335
  def extract_modality_names(s):
336
  """
337
  Function from https://github.com/apple/ml-4m/blob/main/fourm/data/unified_datasets.py.
 
81
  transform: Callable = None,
82
  batch_size: int = 8,
83
  return_metadata: bool = False,
84
+ shuffle: bool = None,
85
  shardshuffle: int = 100,
86
  deterministic: bool = False,
87
  seed: int = None,
88
+ time_dim: bool = False,
89
+ partial: bool = None,
90
+ probs: list[float] = None,
91
  **kwargs,
92
  ):
93
  """
 
101
  It requires batch_size=None in the data loader constructor.
102
  :param transform: Transform function to apply to the data, use MultimodalTransforms.
103
  :param return_metadata: Load center coordinates, timestamp (ns as int) and cloud mask (if available).
104
+ :param shuffle: Shuffle samples and shards. Defaults to True for train and False for val.
105
  :param shardshuffle: The number of shards to shuffle, or None. Defaults to 100.
106
  :param deterministic: Whether to use deterministic shuffling. Defaults to False.
107
  :param seed: Random seed for shuffling. Defaults to None which uses random seeds.
108
  :param kwargs: Optional keyword arguments for single-modality which are passed to WebDataset constructor.
109
+ :param empty_check: Check if shards are empty. Defaults to False.
110
+ :param time_dim: If True, keeps time dimension. Defaults to False.
111
+ :param partial: Load partial batch at the end. Defaults to False for train and True for val.
112
+ :param probs: List of probabilities for each subset (majortom and ssl4eos12). Defaults to [0.8, 0.2].
113
  :return: WebDataset (single modality) or DataPipeline (multiple modalities)
114
  """
115
  if len(modalities) == 1:
116
  # Single modality
117
  modalities = modalities[0]
118
 
119
+ # No shuffle and partial load for val
120
+ shuffle = shuffle if shuffle is not None else split != "val"
121
+ partial = partial if partial is not None else split == "val"
122
+ shardshuffle = shardshuffle * shuffle
123
+
124
  if isinstance(modalities, str):
125
  # Build standard WebDataset for single modality
126
  dataset = build_wds_dataset(
 
131
  batch_size=batch_size,
132
  transform=transform,
133
  return_metadata=return_metadata,
 
134
  shardshuffle=shardshuffle,
135
  deterministic=deterministic,
136
  seed=seed,
137
+ time_dim=time_dim,
138
+ partial=partial,
139
  **kwargs
140
  )
141
  return dataset
 
153
  batch_size=batch_size,
154
  transform=transform,
155
  return_metadata=return_metadata,
 
156
  shardshuffle=shardshuffle,
157
  deterministic=deterministic,
158
  seed=seed,
159
+ time_dim=time_dim,
160
+ partial=partial,
161
+ probs=probs,
162
  )
163
  return dataset
164
 
 
208
  """
209
  Remove time dimension from data tensors.
210
  """
211
+ if (isinstance(value, np.ndarray) or isinstance(value, torch.Tensor)) and value.shape[dim] == 1:
212
  return value.squeeze(dim)
213
 
214
  elif isinstance(value, dict):
215
  for k, v in value.items():
216
+ if (isinstance(v, np.ndarray) or isinstance(v, torch.Tensor)) and v.shape[dim] == 1:
217
  value[k] = v.squeeze(dim)
218
  return value
219
+ else:
220
+ return value
221
 
222
 
223
  def build_wds_dataset(
 
227
  urls: str | None = None,
228
  batch_size: int = 8,
229
  transform: Callable = None,
 
230
  return_metadata: bool = False,
231
  shardshuffle: int = 100,
232
  deterministic: bool = False,
233
  seed: int = None,
234
  empty_check: bool = False,
235
+ time_dim: bool = False,
236
+ partial: bool = False,
237
  *args, **kwargs
238
  ):
239
  if urls is None:
 
258
  dataset = wds.WebDataset(
259
  urls,
260
  *args,
261
+ shardshuffle=shardshuffle,
262
  detshuffle=deterministic,
263
  seed=seed,
264
  handler=warn_and_continue,
 
272
  dataset = dataset.map(zarr_metadata_decoder) if return_metadata else dataset.decode(zarr_decoder)
273
 
274
  # Rename modality to "image" and remove temporal dimension
275
+ dataset = dataset.rename(image="zarr.zip")
276
+
277
+ if not time_dim:
278
+ dataset = dataset.map(drop_time_dim)
279
 
280
  if transform is not None:
281
  dataset = dataset.map(transform)
282
 
283
  # Create batches
284
  if batch_size is not None:
285
+ dataset = dataset.batched(batch_size, partial=partial)
286
 
287
  return dataset
288
 
289
 
290
+ def _subset_pipeline(urls, *, batch_size, shardshuffle, deterministic, seed, empty_check,
291
+ return_metadata, transform, time_dim, partial):
292
+ return wds.DataPipeline(
293
+ wds.ResampledShards(urls, deterministic=deterministic, seed=seed, empty_check=empty_check)
294
+ if shardshuffle else wds.SimpleShardList(urls),
295
+ wds.split_by_node,
296
+ wds.split_by_worker,
297
+ # Extract individual samples from multi-modal tar files
298
+ multi_tarfile_samples,
299
+ wds.shuffle(shardshuffle, seed=seed),
300
+ # Decode from bytes to numpy arrays, etc.
301
+ (wds.map(zarr_metadata_decoder) if return_metadata else wds.decode(zarr_decoder)),
302
+ # Remove time dimension from tensors
303
+ wds.map(drop_time_dim) if not time_dim else wds.map(identity),
304
+ wds.map(remove_extensions),
305
+ wds.map(transform) if transform is not None else wds.map(identity),
306
+ wds.batched(batch_size, collation_fn=collate_fn, partial=partial),
307
+ )
308
+
309
+
310
  def build_multimodal_dataset(
311
  path: str = "https://huggingface.co/datasets/ibm-esa-geospatial/TerraMesh/resolve/main/",
312
  modalities: list = None,
 
314
  urls: str | None = None,
315
  batch_size: int = 8,
316
  transform: Callable = None,
 
317
  return_metadata: bool = False,
318
  shardshuffle: int = 100,
319
  deterministic: bool = False,
320
  seed: int = None,
321
  empty_check: bool = False,
322
+ time_dim: bool = True,
323
+ partial: bool = False,
324
+ probs: list[float] = None,
325
  ):
326
  if modalities is None:
327
  modalities = ["S2L2A", "S2L1C", "S2RGB", "S1GRD", "S1RTC", "DEM", "NDVI", "LULC"] # Default
 
334
  lst.remove(value)
335
  return lst
336
 
337
+ urls_majortom = os.path.join(path, split, f"[{','.join(filter_list(modalities, 'S1GRD'))}]",
338
+ split_files["majortom"][split][0])
339
+ urls_ssl4eos12 = os.path.join(path, split, f"[{','.join(filter_list(modalities, 'S1RTC'))}]",
340
+ split_files["ssl4eos12"][split][0])
341
+ else:
342
+ if "::" in urls:
343
+ urls_majortom, urls_ssl4eos12 = urls.split("::")
344
+ else:
345
+ urls_majortom = urls_ssl4eos12 = urls
346
 
347
+ ds_mt = _subset_pipeline(urls_majortom, batch_size=batch_size, shardshuffle=shardshuffle,
348
+ deterministic=deterministic, seed=seed, empty_check=empty_check,
349
+ return_metadata=return_metadata, transform=transform,
350
+ time_dim=time_dim, partial=partial)
351
+
352
+ ds_ssl = _subset_pipeline(urls_ssl4eos12, batch_size=batch_size, shardshuffle=shardshuffle,
353
+ deterministic=deterministic, seed=seed, empty_check=empty_check,
354
+ return_metadata=return_metadata, transform=transform,
355
+ time_dim=time_dim, partial=partial)
356
+
357
+ # mix batches (never mixes samples)
358
+ dataset = wds.RandomMix([ds_mt, ds_ssl], probs=probs or [0.8, 0.2],
359
+ longest=not shardshuffle # Load all samples if shuffle is false
360
+ )
361
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
362
  return dataset
363
 
364
 
365
+ def collate_fn(batch):
366
+ # Wrapper for debugging
367
+ try:
368
+ return default_collate(batch)
369
+ except Exception as e:
370
+ for s in batch:
371
+ print(s["__key__"])
372
+ print(s["__url__"])
373
+ print(s.keys())
374
+ raise e
375
+
376
+
377
  def extract_modality_names(s):
378
  """
379
  Function from https://github.com/apple/ml-4m/blob/main/fourm/data/unified_datasets.py.