92 | 92 | "remove_keys", |
93 | 93 | "remove_extra_metadata", |
94 | 94 | "get_extra_metadata_keys", |
95 | | - # "PICKLE_KEY_SUFFIX", |
96 | 95 | "is_no_channel", |
97 | 96 | ] |
98 | 97 |
@@ -417,32 +416,6 @@ def dev_collate(batch, level: int = 1, logger_name: str = "dev_collate"): |
417 | 416 | return |
418 | 417 |
419 | 418 |
420 | | -# PICKLE_KEY_SUFFIX = TraceKeys.KEY_SUFFIX |
421 | | - |
422 | | - |
423 | | -# def pickle_operations(data, key=PICKLE_KEY_SUFFIX, is_encode: bool = True): |
424 | | -# """ |
425 | | -# Applied_operations are dictionaries with varying sizes, this method converts them to bytes so that we can (de-)collate. |
426 | | - |
427 | | -# Args: |
428 | | -# data: a list or dictionary with substructures to be pickled/unpickled. |
429 | | -# key: the key suffix for the target substructures, defaults to "_transforms" (`data.utils.PICKLE_KEY_SUFFIX`). |
430 | | -# is_encode: whether it's encoding using pickle.dumps (True) or decoding using pickle.loads (False). |
431 | | -# """ |
432 | | -# if isinstance(data, Mapping): |
433 | | -# data = dict(data) |
434 | | -# for k in data: |
435 | | -# if f"{k}".endswith(key): |
436 | | -# if is_encode and not isinstance(data[k], bytes): |
437 | | -# data[k] = pickle.dumps(data[k], 0) |
438 | | -# if not is_encode and isinstance(data[k], bytes): |
439 | | -# data[k] = pickle.loads(data[k]) |
440 | | -# return {k: pickle_operations(v, key=key, is_encode=is_encode) for k, v in data.items()} |
441 | | -# elif isinstance(data, (list, tuple)): |
442 | | -# return [pickle_operations(item, key=key, is_encode=is_encode) for item in data] |
443 | | -# return data |
444 | | - |
445 | | - |
446 | 419 | def collate_meta_tensor_fn(batch, *, collate_fn_map=None): |
447 | 420 | """ |
448 | 421 | Collate a sequence of meta tensors into a single batched metatensor. This is called by `collate_meta_tensor` |
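For context, the deleted `pickle_operations` helper existed because `applied_operations` records vary in size and structure between samples, which default batch collation cannot handle; serializing each `*_transforms` entry to `bytes` turns it into a fixed-type leaf that can be collated and later restored. Below is a minimal standalone sketch of that round trip, based on the removed code; the `KEY_SUFFIX` constant and the sample data are illustrative stand-ins, not part of the library's public API:

```python
import pickle

KEY_SUFFIX = "_transforms"  # suffix the removed helper matched (see its docstring)

def pickle_operations(data, key=KEY_SUFFIX, is_encode=True):
    """Recursively pickle (encode) or unpickle (decode) values whose keys end with ``key``."""
    if isinstance(data, dict):
        data = dict(data)  # shallow copy so the caller's mapping is untouched
        for k in data:
            if f"{k}".endswith(key):
                if is_encode and not isinstance(data[k], bytes):
                    data[k] = pickle.dumps(data[k], 0)
                if not is_encode and isinstance(data[k], bytes):
                    data[k] = pickle.loads(data[k])
        # recurse into nested containers so deeply nested records are handled too
        return {k: pickle_operations(v, key=key, is_encode=is_encode) for k, v in data.items()}
    if isinstance(data, (list, tuple)):
        return [pickle_operations(item, key=key, is_encode=is_encode) for item in data]
    return data

# Round trip: a variable-size transform record becomes a fixed-type bytes leaf and back.
sample = {"img_transforms": [{"class": "Flip"}, {"class": "Crop", "extra_info": {"roi": 4}}]}
encoded = pickle_operations(sample, is_encode=True)
assert isinstance(encoded["img_transforms"], bytes)
decoded = pickle_operations(encoded, is_encode=False)
assert decoded == sample
```

The removed code passed protocol `0` to `pickle.dumps`, which keeps the payload ASCII-compatible at the cost of size; any pickle protocol would work for the round trip itself.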