Skip to content

Commit e5aa008

Browse files
author
Virginia Fernandez
committed
There was a bug in the LatentDiffusionInferer and ControlnetLatentDiffusionInferer when save_intermediates was off but the latent space needed to be padded and cropped.
Signed-off-by: Virginia Fernandez <[email protected]>
1 parent 08aef50 commit e5aa008

File tree

1 file changed

+5
-2
lines changed

1 file changed

+5
-2
lines changed

monai/inferers/inferer.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
from functools import partial
1919
from pydoc import locate
2020
from typing import Any
21+
2122
import torch
2223
import torch.nn as nn
2324
import torch.nn.functional as F
@@ -1203,7 +1204,8 @@ def sample( # type: ignore[override]
12031204
latent = torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(latent)], 0)
12041205
if save_intermediates:
12051206
latent_intermediates = [
1206-
torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(l)], 0) for l in latent_intermediates
1207+
torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(l)], 0)
1208+
for l in latent_intermediates
12071209
]
12081210

12091211
decode = autoencoder_model.decode_stage_2_outputs
@@ -1729,7 +1731,8 @@ def sample( # type: ignore[override]
17291731
latent = torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(latent)], 0)
17301732
if save_intermediates:
17311733
latent_intermediates = [
1732-
torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(l)], 0) for l in latent_intermediates
1734+
torch.stack([self.autoencoder_resizer(i) for i in decollate_batch(l)], 0)
1735+
for l in latent_intermediates
17331736
]
17341737

17351738
decode = autoencoder_model.decode_stage_2_outputs

0 commit comments

Comments
 (0)