diff --git a/main.py b/main.py
index ff6c248e..3854e208 100644
--- a/main.py
+++ b/main.py
@@ -694,7 +694,7 @@ def init_wandb(save_dir, opt, config, group_name, name_str):
         # TODO change once leaving "swiffer" config directory
         try:
             group_name = nowname.split(now)[-1].split("-")[1]
-        except:
+        except Exception:
             group_name = nowname
         default_logger_cfg["params"]["group"] = group_name
         init_wandb(
@@ -842,7 +842,7 @@ def init_wandb(save_dir, opt, config, group_name, name_str):
                 print(
                     f"{k}, {data.datasets[k].__class__.__name__}, {len(data.datasets[k])}"
                 )
-        except:
+        except Exception:
             print("datasets not yet initialized.")
 
         # configure learning rate
diff --git a/scripts/demo/detect.py b/scripts/demo/detect.py
index 96e9f212..6e219204 100644
--- a/scripts/demo/detect.py
+++ b/scripts/demo/detect.py
@@ -45,7 +45,7 @@ def decode(self, cv2Image, method="dwtDct", **configs):
             bits = embed.decode(cv2Image)
             return self.reconstruct(bits)
 
-        except:
+        except Exception:
             raise e
 
 
diff --git a/scripts/demo/streamlit_helpers.py b/scripts/demo/streamlit_helpers.py
index 01ffe612..631c967b 100644
--- a/scripts/demo/streamlit_helpers.py
+++ b/scripts/demo/streamlit_helpers.py
@@ -489,7 +489,7 @@ def do_sample(
                     if isinstance(batch[key], torch.Tensor):
                         print(key, batch[key].shape)
                     elif isinstance(batch[key], list):
-                        print(key, [len(l) for l in batch[key]])
+                        print(key, [len(lst) for lst in batch[key]])
                     else:
                         print(key, batch[key])
                 c, uc = model.conditioner.get_unconditional_conditioning(
diff --git a/sgm/modules/attention.py b/sgm/modules/attention.py
index 15d66a36..f943bb52 100644
--- a/sgm/modules/attention.py
+++ b/sgm/modules/attention.py
@@ -9,10 +9,10 @@
 from torch import nn
 
 from ..util import default, exists
+from .diffusionmodules.util import checkpoint
 
 logger = logging.getLogger(__name__)
 
-
 if version.parse(torch.__version__) >= version.parse("2.0.0"):
     SDP_IS_AVAILABLE = True
     from torch.backends.cuda import SDPBackend, sdp_kernel
@@ -51,12 +51,10 @@
     import xformers.ops
 
     XFORMERS_IS_AVAILABLE = True
-except:
+except Exception:
     XFORMERS_IS_AVAILABLE = False
     logger.debug("no module 'xformers'. Processing without...")
 
-from .diffusionmodules.util import checkpoint
-
 
 def uniq(arr):  # TODO: this seems unused
     return {el: True for el in arr}.keys()
diff --git a/sgm/modules/diffusionmodules/model.py b/sgm/modules/diffusionmodules/model.py
index 33c25212..72786176 100644
--- a/sgm/modules/diffusionmodules/model.py
+++ b/sgm/modules/diffusionmodules/model.py
@@ -10,6 +10,8 @@
 from einops import rearrange
 from packaging import version
 
+from ...modules.attention import LinearAttention, MemoryEfficientCrossAttention
+
 logger = logging.getLogger(__name__)
 
 try:
@@ -17,12 +19,10 @@
     import xformers.ops
 
     XFORMERS_IS_AVAILABLE = True
-except:
+except Exception:
     XFORMERS_IS_AVAILABLE = False
     logger.debug("no module 'xformers'. Processing without...")
 
-from ...modules.attention import LinearAttention, MemoryEfficientCrossAttention
-
 
 def get_timestep_embedding(timesteps, embedding_dim):
     """
@@ -299,7 +299,9 @@ def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None):
         )
         attn_type = "vanilla"
         attn_kwargs = None
-    logger.debug(f"making attention of type '{attn_type}' with {in_channels} in_channels")
+    logger.debug(
+        f"making attention of type '{attn_type}' with {in_channels} in_channels"
+    )
     if attn_type == "vanilla":
         assert attn_kwargs is None
         return AttnBlock(in_channels)
diff --git a/sgm/modules/diffusionmodules/openaimodel.py b/sgm/modules/diffusionmodules/openaimodel.py
index 1c87442a..0c0458df 100644
--- a/sgm/modules/diffusionmodules/openaimodel.py
+++ b/sgm/modules/diffusionmodules/openaimodel.py
@@ -649,7 +649,8 @@ def __init__(
         self.use_fairscale_checkpoint = False
 
         checkpoint_wrapper_fn = (
-            partial(checkpoint_wrapper, offload_to_cpu=offload_to_cpu)
+            # TODO: this can't work since `checkpoint_wrapper` is not defined
+            partial(checkpoint_wrapper, offload_to_cpu=offload_to_cpu)  # noqa: F821
             if self.use_fairscale_checkpoint
             else lambda x: x
         )
diff --git a/sgm/util.py b/sgm/util.py
index 3e16865a..414458c3 100644
--- a/sgm/util.py
+++ b/sgm/util.py
@@ -31,7 +31,7 @@ def get_string_from_tuple(s):
             return t[0]
         else:
             pass
-    except:
+    except Exception:
         pass
     return s
 
@@ -164,7 +164,9 @@ def mean_flat(tensor):
 def count_params(model, verbose=False):
     total_params = sum(p.numel() for p in model.parameters())
     if verbose:
-        logger.info(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.")
+        logger.info(
+            f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params."
+        )
     return total_params