diff --git a/opacus/tests/grad_sample_module_fast_gradient_clipping_test.py b/opacus/tests/grad_sample_module_fast_gradient_clipping_test.py
index b0006edf..186a51b9 100644
--- a/opacus/tests/grad_sample_module_fast_gradient_clipping_test.py
+++ b/opacus/tests/grad_sample_module_fast_gradient_clipping_test.py
@@ -120,7 +120,7 @@ def setUp_data_sequantial(self, size, length, dim):

     @given(
         size=st.sampled_from([10]),
-        length=st.sampled_from([1, 10]),
+        length=st.sampled_from([5]),
         dim=st.sampled_from([2]),
     )
     @settings(deadline=1000000)
@@ -192,12 +192,12 @@ def test_norm_calculation_fast_gradient_clipping(self, size, length, dim):
         diff = flat_norms_normal - flat_norms_gc

         logging.info(f"Max difference between (vanilla) Opacus and FGC = {max(diff)}")
-        msg = "Fail: Gradients from vanilla DP-SGD and from fast gradient clipping are different"
+        msg = "Fail: Per-sample gradient norms from vanilla DP-SGD and from fast gradient clipping are different"
         assert torch.allclose(flat_norms_normal, flat_norms_gc, atol=1e-3), msg

     @given(
         size=st.sampled_from([10]),
-        length=st.sampled_from([1, 10]),
+        length=st.sampled_from([5]),
         dim=st.sampled_from([2]),
     )
     @settings(deadline=1000000)
diff --git a/opacus/tests/privacy_engine_test.py b/opacus/tests/privacy_engine_test.py
index f2ed1a32..4608a611 100644
--- a/opacus/tests/privacy_engine_test.py
+++ b/opacus/tests/privacy_engine_test.py
@@ -268,7 +268,7 @@ def _compare_to_vanilla(
         do_clip=st.booleans(),
         do_noise=st.booleans(),
         use_closure=st.booleans(),
-        max_steps=st.sampled_from([1, 4]),
+        max_steps=st.sampled_from([1, 3]),
     )
     @settings(suppress_health_check=list(HealthCheck), deadline=None)
     def test_compare_to_vanilla(
@@ -660,7 +660,7 @@ def test_checkpoints(

     @given(
         noise_multiplier=st.floats(0.5, 5.0),
-        max_steps=st.integers(8, 10),
+        max_steps=st.integers(3, 5),
         secure_mode=st.just(False),  # TODO: enable after fixing torchcsprng build
     )
     @settings(suppress_health_check=list(HealthCheck), deadline=None)
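For context, the edits above only narrow the Hypothesis value strategies passed to @given; they do not touch the test bodies. Below is a minimal, self-contained sketch (not part of this patch; the test name and body are hypothetical) of how such a strategy drives a test, so the effect of shrinking sampled_from([1, 10]) to [5] is clear:

# Hypothetical illustration only -- not from the Opacus test suite.
# Requires the `hypothesis` package.
from hypothesis import given, settings, strategies as st


@given(
    size=st.sampled_from([10]),   # always draws 10
    length=st.sampled_from([5]),  # a single value instead of two ([1, 10])
    dim=st.sampled_from([2]),
)
@settings(deadline=None)
def test_example_shapes(size, length, dim):
    # Hypothesis calls this body once per drawn (size, length, dim) tuple;
    # narrowing a strategy reduces the number of combinations it explores.
    batch = [[0.0] * dim for _ in range(length)]
    assert len(batch) == length and all(len(row) == dim for row in batch)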