 # limitations under the License.

 """
-Tests for accounting_noise_multiplier property to ensure correct privacy accounting,
-especially for AdaClipDPOptimizer which internally adjusts noise_multiplier.
+Tests for AdaClipDPOptimizer to ensure correct privacy accounting.
+
+The AdaClip optimizer uses an adjusted noise multiplier for gradient noise
+(Theorem 1 from https://arxiv.org/pdf/1905.03871.pdf), but the original
+noise_multiplier should be preserved for privacy accounting.
 """

 import unittest
@@ -38,8 +41,8 @@ def forward(self, x):
         return self.fc(x)


-class AccountingNoiseMultiplierTest(unittest.TestCase):
-    """Test that accounting_noise_multiplier property works correctly."""
+class AdaClipNoiseMultiplierTest(unittest.TestCase):
+    """Test that AdaClip preserves original noise_multiplier for privacy accounting."""

     def setUp(self):
         # For AdaClip: noise_multiplier must be < 2 * unclipped_num_std
@@ -48,27 +51,8 @@ def setUp(self):
         self.max_grad_norm = 1.0
         torch.manual_seed(42)

-    def test_dpoptimizer_accounting_noise_multiplier(self):
-        """Test that DPOptimizer.accounting_noise_multiplier returns noise_multiplier."""
-        model = SimpleModel()
-        optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
-
-        dp_optimizer = DPOptimizer(
-            optimizer=optimizer,
-            noise_multiplier=self.noise_multiplier,
-            max_grad_norm=self.max_grad_norm,
-            expected_batch_size=32,
-        )
-
-        # For standard DPOptimizer, accounting_noise_multiplier should equal noise_multiplier
-        self.assertEqual(
-            dp_optimizer.accounting_noise_multiplier,
-            dp_optimizer.noise_multiplier,
-        )
-        self.assertEqual(dp_optimizer.accounting_noise_multiplier, self.noise_multiplier)
-
-    def test_adaclip_stores_original_noise_multiplier(self):
-        """Test that AdaClipDPOptimizer stores and returns the original noise_multiplier."""
+    def test_adaclip_preserves_noise_multiplier(self):
+        """Test that AdaClipDPOptimizer preserves original noise_multiplier."""
         model = SimpleModel()
         optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

@@ -84,30 +68,10 @@ def test_adaclip_stores_original_noise_multiplier(self):
             expected_batch_size=32,
         )

-        # Store the adjusted noise_multiplier for comparison
-        adjusted_noise_multiplier = adaclip_optimizer.noise_multiplier
-
-        # accounting_noise_multiplier should return the original value
-        self.assertEqual(
-            adaclip_optimizer.accounting_noise_multiplier, self.noise_multiplier
-        )
-
-        # Verify that noise_multiplier was adjusted according to Theorem 1
-        # noise_multiplier = (sigma^-2 - (2*sigma_u)^-2)^(-1/2)
-        expected_adjusted = (
-            self.noise_multiplier ** (-2) - (2 * self.unclipped_num_std) ** (-2)
-        ) ** (-1 / 2)
-        self.assertAlmostEqual(
-            adjusted_noise_multiplier, expected_adjusted, places=5
-        )
+        # noise_multiplier should remain unchanged (original value)
+        self.assertEqual(adaclip_optimizer.noise_multiplier, self.noise_multiplier)

-        # accounting_noise_multiplier should differ from the adjusted noise_multiplier
-        self.assertNotEqual(
-            adaclip_optimizer.accounting_noise_multiplier,
-            adaclip_optimizer.noise_multiplier,
-        )
-
-    def test_adaclip_accounting_with_zero_noise(self):
+    def test_adaclip_with_zero_noise(self):
         """Test that AdaClipDPOptimizer handles zero noise_multiplier correctly."""
         model = SimpleModel()
         optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
@@ -124,12 +88,11 @@ def test_adaclip_accounting_with_zero_noise(self):
             expected_batch_size=32,
         )

-        # Both should be zero
-        self.assertEqual(adaclip_optimizer.accounting_noise_multiplier, 0.0)
+        # noise_multiplier should be zero
         self.assertEqual(adaclip_optimizer.noise_multiplier, 0.0)

-    def test_accountant_uses_accounting_noise_multiplier(self):
-        """Test that accountant hook code path uses accounting_noise_multiplier from optimizer."""
+    def test_accountant_uses_original_noise_multiplier(self):
+        """Test that accountant hook uses original noise_multiplier from AdaClip optimizer."""
         model = SimpleModel()
         optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
         accountant = RDPAccountant()
@@ -149,11 +112,11 @@ def test_accountant_uses_accounting_noise_multiplier(self):

         sample_rate = 0.01

-        # Manually call accountant.step with accounting_noise_multiplier
+        # Manually call accountant.step with noise_multiplier
         # (mimicking what the hook would do)
         initial_len = len(accountant)
         accountant.step(
-            noise_multiplier=adaclip_optimizer.accounting_noise_multiplier,
+            noise_multiplier=adaclip_optimizer.noise_multiplier,
             sample_rate=sample_rate,
         )

@@ -165,9 +128,8 @@ def test_accountant_uses_accounting_noise_multiplier(self):
         last_entry = accountant.history[-1]
         recorded_noise_multiplier = last_entry[0]

-        # Should use accounting_noise_multiplier (original), not adjusted
+        # Should use original noise_multiplier
         self.assertEqual(recorded_noise_multiplier, self.noise_multiplier)
-        self.assertNotEqual(recorded_noise_multiplier, adaclip_optimizer.noise_multiplier)

     def test_privacy_accounting_with_adaclip_e2e(self):
         """End-to-end test: verify privacy accounting is correct with AdaClip via PrivacyEngine."""
@@ -202,6 +164,9 @@ def test_privacy_accounting_with_adaclip_e2e(self):
         # Verify optimizer is AdaClip
         self.assertIsInstance(dp_optimizer, AdaClipDPOptimizer)

+        # Verify noise_multiplier is preserved
+        self.assertEqual(dp_optimizer.noise_multiplier, self.noise_multiplier)
+
         # Get the accountant
         accountant = privacy_engine.accountant

@@ -216,48 +181,14 @@ def test_privacy_accounting_with_adaclip_e2e(self):
             loss.backward()
             dp_optimizer.step()

-        # Verify accountant recorded steps with accounting_noise_multiplier
+        # Verify accountant recorded steps with original noise_multiplier
         self.assertGreater(len(accountant), 0)

         # All recorded noise multipliers should be the original value
         for entry in accountant.history:
             recorded_noise = entry[0]
-            # Should match accounting_noise_multiplier (original)
-            self.assertEqual(recorded_noise, dp_optimizer.accounting_noise_multiplier)
-            # Should NOT match the adjusted noise_multiplier
-            self.assertNotEqual(recorded_noise, dp_optimizer.noise_multiplier)
-
-    def test_adaclip_accounting_multiplier_immutable(self):
-        """Test that accounting_noise_multiplier remains constant even as noise_multiplier changes."""
-        model = SimpleModel()
-        optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
-
-        adaclip_optimizer = AdaClipDPOptimizer(
-            optimizer=optimizer,
-            noise_multiplier=self.noise_multiplier,
-            target_unclipped_quantile=0.5,
-            clipbound_learning_rate=0.01,
-            max_clipbound=2.0,
-            min_clipbound=0.5,
-            unclipped_num_std=self.unclipped_num_std,
-            max_grad_norm=self.max_grad_norm,
-            expected_batch_size=32,
-        )
-
-        # Store original values
-        original_accounting = adaclip_optimizer.accounting_noise_multiplier
-        original_noise = adaclip_optimizer.noise_multiplier
-
-        # Manually modify noise_multiplier (simulating what might happen during training)
-        adaclip_optimizer.noise_multiplier = 2.0
-
-        # accounting_noise_multiplier should remain unchanged
-        self.assertEqual(adaclip_optimizer.accounting_noise_multiplier, original_accounting)
-        self.assertEqual(adaclip_optimizer.accounting_noise_multiplier, self.noise_multiplier)
-
-        # But noise_multiplier should reflect the change
-        self.assertNotEqual(adaclip_optimizer.noise_multiplier, original_noise)
-        self.assertEqual(adaclip_optimizer.noise_multiplier, 2.0)
+            # Should match original noise_multiplier
+            self.assertEqual(recorded_noise, self.noise_multiplier)

     def test_comparison_dpoptimizer_vs_adaclip_accounting(self):
         """Compare accounting between standard DPOptimizer and AdaClip with same initial noise."""
@@ -289,18 +220,47 @@ def test_comparison_dpoptimizer_vs_adaclip_accounting(self):
             expected_batch_size=32,
         )

-        # Both should report the same accounting_noise_multiplier
+        # Both should have the same noise_multiplier for accounting
         self.assertEqual(
-            dp_optimizer.accounting_noise_multiplier,
-            adaclip_optimizer.accounting_noise_multiplier,
-        )
-
-        # But their actual noise_multiplier values differ
-        self.assertNotEqual(
             dp_optimizer.noise_multiplier,
             adaclip_optimizer.noise_multiplier,
         )

+    def test_adaclip_noise_adjustment_calculation(self):
+        """Test that the adjusted noise follows the Theorem 1 formula when applied internally."""
+        # According to Theorem 1: σ_eff = (σ^-2 - (2σ_u)^-2)^(-1/2)
+        sigma = self.noise_multiplier
+        sigma_u = self.unclipped_num_std
+
+        expected_adjusted = (sigma ** (-2) - (2 * sigma_u) ** (-2)) ** (-1 / 2)
+
+        # Verify the formula produces valid results
+        self.assertGreater(expected_adjusted, 0)
+        # The adjusted noise is larger than the original:
+        # (σ^-2 - positive_term)^(-1/2) > σ when σ < 2*σ_u
+        self.assertGreater(expected_adjusted, sigma)
+
+    def test_adaclip_constraint_validation(self):
+        """Test that AdaClip raises an error when noise_multiplier >= 2 * unclipped_num_std."""
+        model = SimpleModel()
+        optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
+
+        # This should raise ValueError: 2.0 >= 2 * 1.0 = 2.0
+        with self.assertRaises(ValueError) as context:
+            AdaClipDPOptimizer(
+                optimizer=optimizer,
+                noise_multiplier=2.0,
+                target_unclipped_quantile=0.5,
+                clipbound_learning_rate=0.01,
+                max_clipbound=2.0,
+                min_clipbound=0.5,
+                unclipped_num_std=1.0,
+                max_grad_norm=self.max_grad_norm,
+                expected_batch_size=32,
+            )
+
+        self.assertIn("noise_multiplier must be smaller than 2 * unclipped_num_std", str(context.exception))
+

 if __name__ == "__main__":
     unittest.main()
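
Note on the Theorem 1 adjustment exercised by these tests: the snippet below is an illustrative, standalone check only, not part of the test suite or the diff above. The values sigma = 1.0 and sigma_u = 1.5 are assumed for the example (the actual setUp values fall outside the visible hunks); the formula is the one quoted in the test comments, σ_eff = (σ^-2 - (2σ_u)^-2)^(-1/2), which is only defined when σ < 2σ_u.

# Illustrative check of the Theorem 1 noise adjustment (assumed example values,
# not the values used in setUp above).
sigma = 1.0      # original noise_multiplier, the value privacy accounting should record
sigma_u = 1.5    # unclipped_num_std; the formula requires sigma < 2 * sigma_u

# Effective noise AdaClip would add to gradients (per Theorem 1):
sigma_eff = (sigma ** (-2) - (2 * sigma_u) ** (-2)) ** (-1 / 2)

print(round(sigma_eff, 4))   # 1.0607 for these example values
assert sigma_eff > sigma     # extra gradient noise is the cost of noising the unclipped count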