File tree: 1 file changed, +2 −2 lines changed
@@ -1044,7 +1044,7 @@ def _configure_distributed_model(self, model):
1044
1044
self.__check_params(self.module, torch.float)
1045
1045
1046
1046
# zero.Init() handles device placement of model
1047
- if not (self.dont_change_device or is_zero3_model):
1047
+ if not self.dont_change_device:
1048
1048
self.module.to(self.device)
1049
1049
1050
1050
# MoE related initialization
@@ -1080,7 +1080,7 @@ def _configure_distributed_model(self, model):
1080
1080
self.expert_parallel_group = groups._get_expert_parallel_group_dict()
1081
1081
self.expert_data_parallel_group = groups._get_expert_data_parallel_group_dict()
1082
1082
1083
- if not (self.amp_enabled() or is_zero3_model):
1083
+ if not self.amp_enabled():
1084
1084
self._broadcast_model()
1085
1085
1086
1086
# check if parameters are duplicated in optimizer param_groups
You can’t perform that action at this time.
0 commit comments