diff --git a/composer/trainer/trainer.py b/composer/trainer/trainer.py index eb5080eaee..cb42094f37 100644 --- a/composer/trainer/trainer.py +++ b/composer/trainer/trainer.py @@ -1723,9 +1723,12 @@ def __init__( # Load Checkpoint self._rng_state = None # If autoresume is enabled, first check for existing checkpoints to load - if autoresume: + self.autoresume = autoresume + if self.autoresume: log.info('Searching for a previous checkpoint to autoresume') error_message = '' + if max_duration is None: + error_message += 'The `max_duration` must be specified on trainer.__init__ when autoresume is enabled. ' if save_folder is None: error_message += 'The `save_folder` must be specified when autoresume is enabled. ' if save_overwrite: @@ -2188,10 +2191,21 @@ def fit( # Reset Time if reset_time: + if self.autoresume: + raise ValueError( + 'Cannot specify `reset_time=True` when autoresume is enabled. Please instead ' + 'specify `load_ignore_keys` when constructing the Trainer, which will only ' + 'run on the initial load and not any subsequent autoresumptions.', + ) self.state.timestamp = Timestamp() # Max Duration if duration is not None: + if self.autoresume: + raise ValueError( + '`duration` cannot be specified when autoresume is enabled. Please instead ' + 'specify `max_duration` when constructing the Trainer.', + ) duration = ensure_time(duration, TimeUnit.EPOCH) if duration.unit == TimeUnit.SECOND: raise ValueError('Wall clock time not an allowed time unit.') diff --git a/tests/trainer/test_checkpoint.py b/tests/trainer/test_checkpoint.py index d23b55875f..dc887fa5e2 100644 --- a/tests/trainer/test_checkpoint.py +++ b/tests/trainer/test_checkpoint.py @@ -667,6 +667,7 @@ def get_trainer( max_duration: str = '2ep', latest_filename: str = 'latest-rank{rank}.pt', file_extension: str = '.pt', + use_scheduler: bool = True, **kwargs, ): if model is None: @@ -704,7 +705,7 @@ def get_trainer( save_filename='ep{epoch}' + file_extension, max_duration=max_duration, optimizers=optimizer, - schedulers=ExponentialScheduler(gamma=0.9), + schedulers=ExponentialScheduler(gamma=0.9) if use_scheduler else None, callbacks=callbacks, **kwargs, ) @@ -1212,24 +1213,43 @@ def test_load_weights_object_store(self, tmp_path): ) @pytest.mark.parametrize( - 'run_name,save_folder,save_overwrite,latest_filename', + 'run_name,save_folder,save_overwrite,latest_filename,max_duration', [ - [None, 'first', False, 'latest-rank{rank}.pt'], - ['big-chungus', None, False, 'latest-rank{rank}.pt'], - ['big-chungus', 'first', True, 'latest-rank{rank}.pt'], - ['big-chungus', 'first', False, None], + [None, 'first', False, 'latest-rank{rank}.pt', '2ep'], + ['big-chungus', None, False, 'latest-rank{rank}.pt', '2ep'], + ['big-chungus', 'first', True, 'latest-rank{rank}.pt', '2ep'], + ['big-chungus', 'first', False, None, '2ep'], + ['big-chungus', 'first', False, 'latest-rank{rank}.pt', None], ], ) - def test_autoresume_fail(self, run_name, save_folder, save_overwrite, latest_filename): + def test_autoresume_fail_init(self, run_name, save_folder, save_overwrite, latest_filename, max_duration): with pytest.raises(ValueError): self.get_trainer( latest_filename=latest_filename, save_overwrite=save_overwrite, save_folder=save_folder, 
run_name=run_name, + max_duration=max_duration, autoresume=True, + use_scheduler=False, ) + @pytest.mark.parametrize( + 'duration,reset_time', + [ + ['1ep', False], + [None, True], + ], + ) + def test_autoresume_fail_fit(self, duration: Optional[str], reset_time: bool): + trainer = self.get_trainer( + run_name='bigtrainer', + save_folder='first', + autoresume=True, + ) + with pytest.raises(ValueError): + trainer.fit(duration=duration, reset_time=reset_time) + def test_different_run_names(self): trainer_1 = self.get_trainer(