src/lightning/pytorch/utilities — 1 file changed, +2 −2

@@ -93,10 +93,10 @@ def convert_zero_checkpoint_to_fp32_state_dict(
     ]
     checkpoint_dir = ds_checkpoint_dir(checkpoint_dir)
     optim_files = get_optim_files(checkpoint_dir)
-    optim_state = torch.load(optim_files[0], map_location=CPU_DEVICE)
+    optim_state = torch.load(optim_files[0], map_location=CPU_DEVICE, weights_only=False)
     zero_stage = optim_state["optimizer_state_dict"]["zero_stage"]
     model_file = get_model_state_file(checkpoint_dir, zero_stage)
-    client_state = torch.load(model_file, map_location=CPU_DEVICE)
+    client_state = torch.load(model_file, map_location=CPU_DEVICE, weights_only=False)
     client_state = {key: value for key, value in client_state.items() if key not in deepspeed_states}
     # State dict keys will include reference to wrapper _LightningModuleWrapperBase in old checkpoints created in
     # Lightning version < 2.1. Delete the `_forward_module` prefix before saving.
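
For context, here is a minimal standalone sketch (assumed, not part of the PR) of the failure mode the change guards against. `torch.load` defaults to `weights_only=True` as of PyTorch 2.6 (earlier versions emitted a FutureWarning about the upcoming switch), and in that mode it refuses to unpickle arbitrary Python objects such as the non-tensor client state DeepSpeed stores alongside model weights. The `ClientState` class below is a hypothetical stand-in for those objects:

import torch


class ClientState:
    # Hypothetical stand-in for the non-tensor objects DeepSpeed pickles
    # into its checkpoint files (the real files hold DeepSpeed-internal
    # structures, not this class).
    def __init__(self, epoch: int) -> None:
        self.epoch = epoch


torch.save({"client_state": ClientState(3), "weights": torch.zeros(2)}, "ckpt.pt")

try:
    # Default behaviour on PyTorch >= 2.6: pickled custom classes are rejected.
    torch.load("ckpt.pt", weights_only=True)
except Exception as err:
    print(f"weights_only=True refuses custom classes: {err}")

# Opting out restores full pickle deserialization, matching the patched
# calls above.
state = torch.load("ckpt.pt", weights_only=False)
print(state["client_state"].epoch)  # -> 3

Note that `weights_only=False` re-enables full pickle deserialization, which can execute arbitrary code on load; passing it explicitly, as this diff does, is only appropriate for checkpoints from a trusted source, such as ones the trainer wrote itself.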