Commit 3a55c5f

Merge branch 'bugfix_model_checkpoint_save_last_not_exists' of https://github.com/SkafteNicki/pytorch-lightning into bugfix_model_checkpoint_save_last_not_exists
2 parents: 9c11ca3 + de3b0f7

File tree

2 files changed: 14 additions & 10 deletions


src/lightning/pytorch/callbacks/model_checkpoint.py

Lines changed: 6 additions & 4 deletions
@@ -381,8 +381,9 @@ def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0:
             self._save_topk_checkpoint(trainer, monitor_candidates)
         # Only save last checkpoint if a checkpoint was actually saved in this step or if save_last="link"
-        if (self._last_global_step_saved == trainer.global_step or
-            (self.save_last == "link" and self._last_checkpoint_saved)):
+        if self._last_global_step_saved == trainer.global_step or (
+            self.save_last == "link" and self._last_checkpoint_saved
+        ):
             self._save_last_checkpoint(trainer, monitor_candidates)
 
     @override
@@ -401,8 +402,9 @@ def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0:
             self._save_topk_checkpoint(trainer, monitor_candidates)
         # Only save last checkpoint if a checkpoint was actually saved in this step or if save_last="link"
-        if (self._last_global_step_saved == trainer.global_step or
-            (self.save_last == "link" and self._last_checkpoint_saved)):
+        if self._last_global_step_saved == trainer.global_step or (
+            self.save_last == "link" and self._last_checkpoint_saved
+        ):
             self._save_last_checkpoint(trainer, monitor_candidates)
 
     @override
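
For context, a minimal sketch of a configuration that exercises the guarded branch above. It is not part of this commit: the output directory and the model are assumptions, and any LightningModule that logs "val_loss" would work.

from lightning.pytorch import Trainer
from lightning.pytorch.callbacks import ModelCheckpoint

# With this change, last.ckpt is only written when a top-k checkpoint was saved in the
# same step, or when save_last="link" and a previously saved checkpoint exists to link to.
checkpoint_callback = ModelCheckpoint(
    dirpath="checkpoints/",  # hypothetical output directory
    monitor="val_loss",
    save_top_k=1,
    mode="min",
    save_last=True,  # or save_last="link" to point last.ckpt at the newest saved checkpoint
)
trainer = Trainer(max_epochs=5, callbacks=[checkpoint_callback])
# trainer.fit(SomeLitModel())  # placeholder: any LightningModule that logs "val_loss"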

tests/tests_pytorch/checkpointing/test_model_checkpoint.py

Lines changed: 8 additions & 6 deletions
@@ -2128,32 +2128,32 @@ def test_save_last_without_save_on_train_epoch_and_without_val(tmp_path):
 
 def test_save_last_only_when_checkpoint_saved(tmp_path):
     """Test that save_last only creates last.ckpt when another checkpoint is actually saved."""
-
+
     class SelectiveModel(BoringModel):
         def __init__(self):
             super().__init__()
             self.validation_step_outputs = []
-
+
         def validation_step(self, batch, batch_idx):
             outputs = super().validation_step(batch, batch_idx)
             epoch = self.trainer.current_epoch
             loss = torch.tensor(1.0 - epoch * 0.1) if epoch % 2 == 0 else torch.tensor(1.0 + epoch * 0.1)
             outputs["val_loss"] = loss
             self.validation_step_outputs.append(outputs)
             return outputs
-
+
         def on_validation_epoch_end(self):
             if self.validation_step_outputs:
                 avg_loss = torch.stack([x["val_loss"] for x in self.validation_step_outputs]).mean()
                 self.log("val_loss", avg_loss)
                 self.validation_step_outputs.clear()
 
     model = SelectiveModel()
-
+
     checkpoint_callback = ModelCheckpoint(
         dirpath=tmp_path,
         filename="best-{epoch}-{val_loss:.2f}",
-        monitor="val_loss",
+        monitor="val_loss",
         save_last=True,
         save_top_k=1,
         mode="min",
@@ -2177,4 +2177,6 @@ def on_validation_epoch_end(self):
     checkpoint_names = [f.name for f in checkpoint_files]
     assert "last.ckpt" in checkpoint_names, "last.ckpt should exist since checkpoints were saved"
     expected_files = 2  # best checkpoint + last.ckpt
-    assert len(checkpoint_files) == expected_files, f"Expected {expected_files} files, got {len(checkpoint_files)}: {checkpoint_names}"
+    assert len(checkpoint_files) == expected_files, (
+        f"Expected {expected_files} files, got {len(checkpoint_files)}: {checkpoint_names}"
+    )
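
Outside the test suite, the same expectation can be checked by hand after a run with the callback sketched earlier; the directory below is the hypothetical one from that sketch, not something this commit defines.

from pathlib import Path

# Expect exactly one "best-..." checkpoint plus last.ckpt once training finishes.
ckpt_names = sorted(p.name for p in Path("checkpoints/").glob("*.ckpt"))
assert "last.ckpt" in ckpt_names
assert len(ckpt_names) == 2, f"Expected 2 files, got {len(ckpt_names)}: {ckpt_names}"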
