Commit 7f7987e

Remove FSDP traces in AMP precision unit test
1 parent 2f62a0a commit 7f7987e

File tree

1 file changed: +3 −3 lines changed

tests/tests_pytorch/plugins/precision/test_amp.py

Lines changed: 3 additions & 3 deletions

@@ -14,7 +14,6 @@
 from unittest.mock import Mock
 
 import pytest
-from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
 from torch.nn import Module
 from torch.optim import Optimizer
 
@@ -24,7 +23,7 @@
 
 def test_clip_gradients():
     """Test that `.clip_gradients()` is a no-op when clipping is disabled."""
-    module = FSDP(Mock(spec=Module))
+    module = Mock(spec=Module)
     optimizer = Mock(spec=Optimizer)
     precision = MixedPrecision(precision="16-mixed", device="cuda:0", scaler=Mock())
     precision.clip_grad_by_value = Mock()
@@ -49,8 +48,9 @@ def test_optimizer_amp_scaling_support_in_step_method():
     """Test that the plugin checks if the optimizer takes over unscaling in its step, making it incompatible with
     gradient clipping (example: fused Adam)."""
 
+    module = Mock(spec=Module)
     optimizer = Mock(_step_supports_amp_scaling=True)
     precision = MixedPrecision(precision="16-mixed", device="cuda:0", scaler=Mock())
 
     with pytest.raises(RuntimeError, match="The current optimizer.*does not allow for gradient clipping"):
-        precision.clip_gradients(optimizer, clip_val=1.0)
+        precision.clip_gradients(module, optimizer, clip_val=1.0)
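
For context, here is a minimal sketch of how the second test reads after this commit, reconstructed from the hunks above. The import location of MixedPrecision is an assumption (it is not visible in the diff); everything else is taken directly from the changed lines and their surrounding context.

# Sketch of test_optimizer_amp_scaling_support_in_step_method after this commit.
from unittest.mock import Mock

import pytest
from torch.nn import Module

from lightning.pytorch.plugins import MixedPrecision  # assumed import path, not shown in the diff


def test_optimizer_amp_scaling_support_in_step_method():
    """Test that the plugin checks if the optimizer takes over unscaling in its step, making it incompatible with
    gradient clipping (example: fused Adam)."""

    # A plain mocked module replaces the previous FSDP-wrapped mock.
    module = Mock(spec=Module)
    optimizer = Mock(_step_supports_amp_scaling=True)
    precision = MixedPrecision(precision="16-mixed", device="cuda:0", scaler=Mock())

    with pytest.raises(RuntimeError, match="The current optimizer.*does not allow for gradient clipping"):
        # clip_gradients now receives the module as its first argument.
        precision.clip_gradients(module, optimizer, clip_val=1.0)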
