From 70e24086be1a26af38c0d1b0316d94a1aeafa7c4 Mon Sep 17 00:00:00 2001 From: kunwoopark Date: Thu, 1 Dec 2022 10:55:15 +0900 Subject: [PATCH 1/3] Set pyyaml requirements version to supported version 5.4.1 (yaml.load() without Loader=... is deprecated in the latest version) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 40e445981..72dbed81c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ numba torch>=1.1 tensorboardX easydict -pyyaml +pyyaml==5.4.1 scikit-image tqdm torchvision From b174f67e6251bcf942f5ce9bd7e54b9dbce47c8f Mon Sep 17 00:00:00 2001 From: kunwoopark Date: Thu, 1 Dec 2022 10:57:47 +0900 Subject: [PATCH 2/3] Correct default info option for once dataset and components inside ignore list to generate dataset properly (if the dataset is downloaded without raw data, the dataset generator does not work) --- pcdet/datasets/once/once_dataset.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pcdet/datasets/once/once_dataset.py b/pcdet/datasets/once/once_dataset.py index 0ff079843..7c6c3901f 100644 --- a/pcdet/datasets/once/once_dataset.py +++ b/pcdet/datasets/once/once_dataset.py @@ -389,7 +389,7 @@ def create_once_infos(dataset_cfg, class_names, data_path, save_path, workers=4) dataset = ONCEDataset(dataset_cfg=dataset_cfg, class_names=class_names, root_path=data_path, training=False) splits = ['train', 'val', 'test', 'raw_small', 'raw_medium', 'raw_large'] - ignore = ['test'] + ignore = ['test', 'raw_small', 'raw_medium', 'raw_large'] print('---------------Start to generate data infos---------------') for split in splits: @@ -416,7 +416,7 @@ def create_once_infos(dataset_cfg, class_names, data_path, save_path, workers=4) parser = argparse.ArgumentParser(description='arg parser') parser.add_argument('--cfg_file', type=str, default=None, help='specify the config of dataset') - parser.add_argument('--func', type=str, default='create_waymo_infos', help='') + 
parser.add_argument('--func', type=str, default='create_once_infos', help='') parser.add_argument('--runs_on', type=str, default='server', help='') args = parser.parse_args() @@ -441,4 +441,4 @@ def create_once_infos(dataset_cfg, class_names, data_path, save_path, workers=4) class_names=['Car', 'Bus', 'Truck', 'Pedestrian', 'Bicycle'], data_path=once_data_path, save_path=once_save_path - ) \ No newline at end of file + ) From 7b1b08dc3d3fb734b6f39f3a00e94a8fa3edb0d3 Mon Sep 17 00:00:00 2001 From: kunwoopark Date: Thu, 1 Dec 2022 11:33:15 +0900 Subject: [PATCH 3/3] Make sources generate only existing datasets --- pcdet/datasets/once/once_dataset.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pcdet/datasets/once/once_dataset.py b/pcdet/datasets/once/once_dataset.py index 7c6c3901f..92d6b330c 100644 --- a/pcdet/datasets/once/once_dataset.py +++ b/pcdet/datasets/once/once_dataset.py @@ -12,6 +12,7 @@ from ...utils import box_utils from .once_toolkits import Octopus + class ONCEDataset(DatasetTemplate): def __init__(self, dataset_cfg, class_names, training=True, root_path=None, logger=None): """ @@ -388,8 +389,14 @@ def evaluation(self, det_annos, class_names, **kwargs): def create_once_infos(dataset_cfg, class_names, data_path, save_path, workers=4): dataset = ONCEDataset(dataset_cfg=dataset_cfg, class_names=class_names, root_path=data_path, training=False) - splits = ['train', 'val', 'test', 'raw_small', 'raw_medium', 'raw_large'] - ignore = ['test', 'raw_small', 'raw_medium', 'raw_large'] + image_sets = save_path / Path("ImageSets") + exist_sets = set(list(map(lambda p: p.stem, list(image_sets.glob("*.txt"))))) + + splits = set(['train', 'val', 'test', 'raw_small', 'raw_medium', 'raw_large']) + ignore = splits - exist_sets + + splits = list(splits) + ignore = list(ignore) print('---------------Start to generate data infos---------------') for split in splits: