
Commit f23e5c4

facebook-github-bot authored and committed
Add copyright and license info (#719)
Add copyright and license info (#719)

Summary:

## Types of changes
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
- [x] Docs change / refactoring / dependency upgrade

## Motivation and Context / Related issue

## How Has This Been Tested (if it applies)

## Checklist
- [x] The documentation is up-to-date with the changes I made.
- [x] I have read the **CONTRIBUTING** document and completed the CLA (see **CONTRIBUTING**).
- [ ] All tests passed, and additional code has been covered with new tests.

Pull Request resolved: #719

Reviewed By: iden-kalemaj

Differential Revision: D68476050

fbshipit-source-id: 2330aeccdc2fa6540dc70ae35b7e42d51871d6f8
1 parent 86b4ab4 commit f23e5c4

10 files changed: +154 −8 lines

research/disk_optimizer/KFprivacy_engine.py

+14

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from typing import List, Union

 from opacus.optimizers import DPOptimizer

research/disk_optimizer/ReadMe.md

+3 −1

@@ -3,7 +3,9 @@
 ## Introduction
 This part of the code introduces a new component to the optimizer named DiSK. The code uses a simplified Kalman filter to improve the privatized gradient estimate. Specifically, the privatized minibatch gradient is replaced with:

-$$\mathbb{g}_{t+\frac{1}{2}} = \frac{1}{B}\sum_{\xi \in \mathcal{B}_t} \mathrm{clip}_C\left(\frac{1-\kappa}{\kappa\gamma}\nabla f(x_t + \gamma(x_t-x_{t-1});\xi) + \Big(1- \frac{1-\kappa}{\kappa\gamma}\Big)\nabla f(x_t;\xi)\right) + w_t$$
+$$\mathbb{g}_{t+\frac{1}{2}}(\xi) = \frac{1-\kappa}{\kappa\gamma}\nabla f(x_t + \gamma(x_t-x_{t-1});\xi) + \Big(1- \frac{1-\kappa}{\kappa\gamma}\Big)\nabla f(x_t;\xi)$$
+
+$$\mathbb{g}_{t+\frac{1}{2}} = \frac{1}{B}\sum_{\xi \in \mathcal{B}_t} \mathrm{clip}_C\left(\mathbb{g}_{t+\frac{1}{2}}(\xi)\right) + w_t$$

 $$g_{t}= (1-\kappa)g_{t-1} + \kappa g_{t+\frac{1}{2}}$$

 A detailed description of the algorithm can be found [here](https://arxiv.org/abs/2410.03883).
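
To make the update concrete, here is a minimal PyTorch sketch of one DiSK step under a mean-reduction DP-SGD noise convention. `disk_gradient`, its argument names, and the noise scale `sigma * C / B` are illustrative assumptions, not the repository's API:

```python
import torch

def disk_gradient(g_prev, grads_shift, grads, kappa, gamma, C, sigma):
    """One DiSK step from per-sample gradients of shape (B, d)."""
    c = (1 - kappa) / (kappa * gamma)

    # Per-sample mix of the shifted-point and current-point gradients.
    g_half = c * grads_shift + (1 - c) * grads

    # Clip each per-sample gradient to norm C, then average over the batch.
    norms = g_half.norm(dim=1, keepdim=True)
    clipped = g_half * (C / norms).clamp(max=1.0)
    B = g_half.shape[0]

    # Gaussian privatization noise w_t, std sigma * C / B for mean reduction.
    noisy = clipped.mean(dim=0) + (sigma * C / B) * torch.randn(g_half.shape[1])

    # Kalman smoothing across iterations: g_t = (1 - kappa) g_{t-1} + kappa g_{t+1/2}
    return (1 - kappa) * g_prev + kappa * noisy
```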

research/disk_optimizer/optimizers/KFadaclipoptimizer.py

+17 −1

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
@@ -33,6 +47,7 @@ def __init__(
         secure_mode: bool = False,
         kappa: float = 0.7,
         gamma: float = 0.5,
+        **kwargs,
     ):
         if gamma == 0 or abs(gamma - (1 - kappa) / kappa) < 1e-3:
             gamma = (1 - kappa) / kappa
@@ -58,6 +73,7 @@ def __init__(
             max_clipbound=max_clipbound,
             min_clipbound=min_clipbound,
             unclipped_num_std=unclipped_num_std,
+            **kwargs,
         )
         self.kappa = kappa
         self.gamma = gamma
@@ -79,7 +95,7 @@ def step(self, closure=required) -> Optional[float]:
                 first_step = True
                 state["kf_d_t"] = torch.zeros_like(p.data).to(p.data)
                 state["kf_m_t"] = grad.clone().to(p.data)
-                state["kf_m_t"].lerp_(grad, weight=self.kappa)
+                state["kf_m_t"].lerp_(grad, weight=1 - self.kappa)
                 p.grad = state["kf_m_t"].clone().to(p.data)
                 state["kf_d_t"] = -p.data.clone().to(p.data)
                 if first_step:
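
For reference on the `lerp_` fix above (the same fix recurs in the other optimizers below): `Tensor.lerp_(other, weight=w)` computes `self + w * (other - self)`, i.e. `(1 - w) * self + w * other`, in place. So `weight=1 - self.kappa` keeps a `kappa` fraction of the stored state and blends in `1 - kappa` of the incoming gradient. A standalone numeric check:

```python
import torch

m = torch.tensor([1.0, 2.0])   # stored filter state, like kf_m_t
g = torch.tensor([3.0, 4.0])   # incoming gradient
kappa = 0.7

# m <- m + (1 - kappa) * (g - m) == kappa * m + (1 - kappa) * g
m.lerp_(g, weight=1 - kappa)
print(m)  # tensor([1.6000, 2.6000]) == 0.7 * [1, 2] + 0.3 * [3, 4]
```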

research/disk_optimizer/optimizers/KFddp_perlayeroptimizer.py

+18

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 from functools import partial
@@ -29,6 +43,7 @@ def __init__(
         secure_mode: bool = False,
         kappa: float = 0.7,
         gamma: float = 0.5,
+        **kwargs,
     ):
         self.rank = torch.distributed.get_rank()
         self.world_size = torch.distributed.get_world_size()
@@ -43,6 +58,7 @@ def __init__(
             secure_mode=secure_mode,
             kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )


@@ -64,6 +80,7 @@ def __init__(
         secure_mode: bool = False,
         kappa: float = 0.7,
         gamma: float = 0.5,
+        **kwargs,
     ):
         self.rank = torch.distributed.get_rank()
         self.world_size = torch.distributed.get_world_size()
@@ -79,6 +96,7 @@ def __init__(
             secure_mode=secure_mode,
            kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )
         self._register_hooks()
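
The `**kwargs` threading that recurs across these constructors is the standard pass-through pattern: each subclass forwards keyword arguments it does not consume, so a new option on a base optimizer does not force a signature change in every subclass. A minimal sketch with hypothetical classes, not the repository's:

```python
class BaseOptimizer:
    def __init__(self, lr=0.1, **kwargs):
        self.lr = lr
        # Options no class consumed end up here; a real base might validate them.

class KalmanOptimizer(BaseOptimizer):
    def __init__(self, kappa=0.7, gamma=0.5, **kwargs):
        super().__init__(**kwargs)  # forward everything we don't handle ourselves
        self.kappa = kappa
        self.gamma = gamma

opt = KalmanOptimizer(kappa=0.9, lr=0.01)  # lr passes through to BaseOptimizer
```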

research/disk_optimizer/optimizers/KFddpoptimizer.py

+17 −1

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
@@ -32,6 +46,7 @@ def __init__(
         secure_mode: bool = False,
         kappa=0.7,
         gamma=0.5,
+        **kwargs,
     ):
         super().__init__(
             optimizer,
@@ -43,6 +58,7 @@ def __init__(
             secure_mode=secure_mode,
             kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )
         self.rank = torch.distributed.get_rank()
         self.world_size = torch.distributed.get_world_size()
@@ -80,7 +96,7 @@ def step(self, closure=required) -> Optional[float]:
                 first_step = True
                 state["kf_d_t"] = torch.zeros_like(p.data).to(p.data)
                 state["kf_m_t"] = grad.clone().to(p.data)
-                state["kf_m_t"].lerp_(grad, weight=self.kappa)
+                state["kf_m_t"].lerp_(grad, weight=1 - self.kappa)
                 p.grad = state["kf_m_t"].clone().to(p.data)
                 state["kf_d_t"] = -p.data.clone().to(p.data)
                 if first_step:

research/disk_optimizer/optimizers/KFddpoptimizer_fast_gradient_clipping.py

+17 −1

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
@@ -32,6 +46,7 @@ def __init__(
         secure_mode: bool = False,
         kappa=0.7,
         gamma=0.5,
+        **kwargs,
     ):
         super().__init__(
             optimizer,
@@ -43,6 +58,7 @@ def __init__(
             secure_mode=secure_mode,
             kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )
         self.rank = torch.distributed.get_rank()
         self.world_size = torch.distributed.get_world_size()
@@ -80,7 +96,7 @@ def step(self, closure=required) -> Optional[float]:
                 first_step = True
                 state["kf_d_t"] = torch.zeros_like(p.data).to(p.data)
                 state["kf_m_t"] = grad.clone().to(p.data)
-                state["kf_m_t"].lerp_(grad, weight=self.kappa)
+                state["kf_m_t"].lerp_(grad, weight=1 - self.kappa)
                 p.grad = state["kf_m_t"].clone().to(p.data)
                 state["kf_d_t"] = -p.data.clone().to(p.data)
                 if first_step:

research/disk_optimizer/optimizers/KFoptimizer.py

+17 −1

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
@@ -27,6 +41,7 @@ def __init__(
         secure_mode: bool = False,
         kappa=0.7,
         gamma=0.5,
+        **kwargs,
     ):
         if gamma == 0 or abs(gamma - (1 - kappa) / kappa) < 1e-3:
             gamma = (1 - kappa) / kappa
@@ -47,6 +62,7 @@ def __init__(
             loss_reduction=loss_reduction,
             generator=generator,
             secure_mode=secure_mode,
+            **kwargs,
         )
         self.kappa = kappa
         self.gamma = gamma
@@ -131,7 +147,7 @@ def step(self, closure=required) -> Optional[float]:
                 first_step = True
                 state["kf_d_t"] = torch.zeros_like(p.data).to(p.data)
                 state["kf_m_t"] = grad.clone().to(p.data)
-                state["kf_m_t"].lerp_(grad, weight=self.kappa)
+                state["kf_m_t"].lerp_(grad, weight=1 - self.kappa)
                 p.grad = state["kf_m_t"].clone().to(p.data)
                 state["kf_d_t"] = -p.data.clone().to(p.data)
                 if first_step:
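
A note on the `gamma` guard in the first hunk above: when `gamma == (1 - kappa) / kappa`, the mixing coefficient `(1 - kappa) / (kappa * gamma)` equals exactly 1, so the per-sample estimate collapses to a single gradient evaluated at the shifted point `x_t + gamma * (x_t - x_{t-1})`; snapping near-miss values of `gamma` to that value presumably lets the optimizer take the cheaper single-evaluation path. For the default `kappa = 0.7`, the special value is `gamma = 0.3 / 0.7 ≈ 0.4286`.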

research/disk_optimizer/optimizers/KFoptimizer_fast_gradient_clipping.py

+21 −3

@@ -1,9 +1,26 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
 from typing import Optional

 import torch
+from opacus.optimizers.optimizer_fast_gradient_clipping import (
+    DPOptimizerFastGradientClipping,
+)
 from torch.optim import Optimizer
 from torch.optim.optimizer import required

@@ -13,8 +30,7 @@
 logger = logging.getLogger(__name__)
 logger.disabled = True

-
-class KF_DPOptimizerFastGradientClipping(KF_DPOptimizer):
+class KF_DPOptimizerFastGradientClipping(DPOptimizerFastGradientClipping, KF_DPOptimizer):
     def __init__(
         self,
         optimizer: Optimizer,
@@ -27,8 +43,9 @@ def __init__(
         secure_mode: bool = False,
         kappa=0.7,
         gamma=0.5,
+        **kwargs,
     ):
-        super().__init__(
+        super(KF_DPOptimizer).__init__(
             optimizer,
             noise_multiplier=noise_multiplier,
             max_grad_norm=max_grad_norm,
@@ -38,6 +55,7 @@ def __init__(
             secure_mode=secure_mode,
             kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )

     def _compute_one_closure(self, closure=required):
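
The changed `super(...)` call in this file is aimed at starting the method lookup at a specific point in the method resolution order rather than at the next class. A minimal sketch of the standard two-argument form that skips a given class in the MRO (hypothetical classes, shown only to illustrate the mechanism, not the repository's code):

```python
class Base:
    def __init__(self, **kwargs):
        print("Base.__init__", kwargs)

class MixinA(Base):
    def __init__(self, **kwargs):
        print("MixinA.__init__")
        super().__init__(**kwargs)

class MixinB(Base):
    def __init__(self, **kwargs):
        print("MixinB.__init__")
        super().__init__(**kwargs)

class Combined(MixinA, MixinB):
    # MRO: Combined -> MixinA -> MixinB -> Base -> object
    def __init__(self, **kwargs):
        # super(MixinB, self) starts the lookup *after* MixinB in the MRO,
        # so both MixinA.__init__ and MixinB.__init__ are skipped here.
        super(MixinB, self).__init__(**kwargs)

Combined(x=1)  # prints only: Base.__init__ {'x': 1}
```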

research/disk_optimizer/optimizers/KFperlayeroptimizer.py

+16

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from __future__ import annotations

 import logging
@@ -28,6 +42,7 @@ def __init__(
         secure_mode: bool = False,
         kappa=0.7,
         gamma=0.5,
+        **kwargs,
     ):
         assert len(max_grad_norm) == len(params(optimizer))
         self.max_grad_norms = max_grad_norm
@@ -42,6 +57,7 @@ def __init__(
             secure_mode=secure_mode,
             kappa=kappa,
             gamma=gamma,
+            **kwargs,
         )

     def clip_and_accumulate(self):

research/disk_optimizer/optimizers/__init__.py

+14

@@ -1,3 +1,17 @@
+# Copyright (c) Xinwei Zhang
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from opacus.optimizers import (
     AdaClipDPOptimizer,
     DistributedDPOptimizer,
