Fix lint
Differential Revision: D53198074
Karthik Prasad authored and facebook-github-bot committed Jan 29, 2024
1 parent f4dc430 commit 0685c59
Showing 6 changed files with 22 additions and 16 deletions.
examples/dcgan.py (2 changes: 1 addition & 1 deletion)
@@ -201,7 +201,7 @@ def __init__(self, ngpu):
             nn.ReLU(True),
             # state size. (ngf) x 32 x 32
             nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
-            nn.Tanh()
+            nn.Tanh(),
             # state size. (nc) x 64 x 64
         )
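The only semantic content of this hunk is the trailing comma added after nn.Tanh(): black treats a trailing ("magic") comma as a signal to keep the enclosing call expanded, one element per line. A minimal sketch of the same pattern, with made-up channel counts:

import torch.nn as nn

# Hypothetical layer sizes, for illustration only.
block = nn.Sequential(
    nn.ConvTranspose2d(64, 3, 4, 2, 1, bias=False),
    nn.Tanh(),  # trailing comma: black keeps one layer per line
)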

opacus/accountants/analysis/prv/domain.py (1 change: 1 addition & 0 deletions)
@@ -12,6 +12,7 @@ class Domain:
     Stores relevant information about the domain on which PRVs are discretized, and
     includes a few convenience methods for manipulating it.
     """
+
     t_min: float
     t_max: float
     size: int
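The single added line is a blank line between the class docstring and the first field; newer black releases enforce this separation. A hedged sketch of the resulting shape (the field names come from the hunk; the @dataclass decorator is an assumption, since it is not visible here):

from dataclasses import dataclass

@dataclass
class Domain:
    """Docstring, followed by the blank line this hunk adds."""

    t_min: float
    t_max: float
    size: int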
opacus/layers/dp_rnn.py (7 changes: 4 additions & 3 deletions)
@@ -405,9 +405,10 @@ def forward(
         for direction, (cell, h0, c0) in directions:
             # apply single direction layer (with dropout)
             out_layer, h, c = self.forward_layer(
-                x
-                if layer == 0
-                else output,  # [T, B, D/H/2H] / tuple T x [B, D/H/2H]
+                (
+                    x if layer == 0 else output
+                    # [T, B, D/H/2H] / tuple T x [B, D/H/2H]
+                ),
                 h0,  # [B, H]
                 c0,
                 batch_sizes,
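Here black 24 wraps a multi-line conditional expression passed as a positional argument in its own parentheses, which also moves the shape comment inside them. A self-contained sketch of the pattern, with made-up names:

def first_positional(arg, other):
    return arg, other

x, output, layer = "input", "previous layer output", 0

result = first_positional(
    (
        x if layer == 0 else output
        # explanatory comments can sit inside the parentheses
    ),
    "second argument",
)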
opacus/privacy_engine.py (6 changes: 3 additions & 3 deletions)
@@ -536,9 +536,9 @@ def save_checkpoint(
         if noise_scheduler is not None:
             checkpoint_dict["noise_scheduler_state_dict"] = noise_scheduler.state_dict()
         if grad_clip_scheduler is not None:
-            checkpoint_dict[
-                "grad_clip_scheduler_state_dict"
-            ] = grad_clip_scheduler.state_dict()
+            checkpoint_dict["grad_clip_scheduler_state_dict"] = (
+                grad_clip_scheduler.state_dict()
+            )

         torch.save(checkpoint_dict, path, **(torch_save_kwargs or {}))
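The reformat only switches the subscript assignment to black 24's parenthesized right-hand side; the checkpoint contents are unchanged. For context, a hedged sketch of reading those entries back: only the two key names are taken from the hunk, and the scheduler objects are assumed to expose load_state_dict():

import torch

def load_scheduler_states(path, noise_scheduler=None, grad_clip_scheduler=None):
    checkpoint_dict = torch.load(path)
    if noise_scheduler is not None and "noise_scheduler_state_dict" in checkpoint_dict:
        noise_scheduler.load_state_dict(checkpoint_dict["noise_scheduler_state_dict"])
    if grad_clip_scheduler is not None and "grad_clip_scheduler_state_dict" in checkpoint_dict:
        grad_clip_scheduler.load_state_dict(
            checkpoint_dict["grad_clip_scheduler_state_dict"]
        )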

opacus/tests/grad_samples/conv2d_test.py (6 changes: 3 additions & 3 deletions)
@@ -90,9 +90,9 @@ def test_conv2d(
         # Test 'convolution as a backward' GSM
         # 'convolution as a backward' doesn't support padding=same
         conv2d_gsm = GradSampleModule.GRAD_SAMPLERS[nn.Conv2d]
-        GradSampleModule.GRAD_SAMPLERS[
-            nn.Conv2d
-        ] = convolution2d_backward_as_a_convolution
+        GradSampleModule.GRAD_SAMPLERS[nn.Conv2d] = (
+            convolution2d_backward_as_a_convolution
+        )
         self.run_test(
             x,
             conv,
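The test saves the registered grad sampler in conv2d_gsm before overriding the registry entry, presumably restoring it after run_test (the restore is outside this hunk). A minimal sketch of that swap-and-restore pattern, using a plain dict in place of GradSampleModule.GRAD_SAMPLERS:

REGISTRY = {"conv2d": "default_sampler"}

def run_with_override(key, replacement, test):
    saved = REGISTRY[key]          # like conv2d_gsm above
    REGISTRY[key] = replacement
    try:
        test()
    finally:
        REGISTRY[key] = saved      # restore even if the test fails

run_with_override("conv2d", "backward_as_convolution", lambda: None)
assert REGISTRY["conv2d"] == "default_sampler"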
opacus/tests/privacy_engine_test.py (16 changes: 10 additions & 6 deletions)
@@ -87,9 +87,11 @@ def _init_vanilla_training(
     ):
         model = self._init_model()
         optimizer = torch.optim.SGD(
-            model.parameters()
-            if not opt_exclude_frozen
-            else [p for p in model.parameters() if p.requires_grad],
+            (
+                model.parameters()
+                if not opt_exclude_frozen
+                else [p for p in model.parameters() if p.requires_grad]
+            ),
             lr=self.LR,
             momentum=0,
         )
@@ -112,9 +114,11 @@ def _init_private_training(
         model = self._init_model()
         model = PrivacyEngine.get_compatible_module(model)
         optimizer = torch.optim.SGD(
-            model.parameters()
-            if not opt_exclude_frozen
-            else [p for p in model.parameters() if p.requires_grad],
+            (
+                model.parameters()
+                if not opt_exclude_frozen
+                else [p for p in model.parameters() if p.requires_grad]
+            ),
             lr=self.LR,
             momentum=0,
         )
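Both helpers pick the optimizer's parameter list with the same conditional: all parameters, or only those with requires_grad=True when frozen parameters should be excluded. A runnable sketch of that filtering; the model and hyperparameters are made up:

import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 2))
model[0].weight.requires_grad = False  # freeze one tensor

opt_exclude_frozen = True
optimizer = torch.optim.SGD(
    (
        model.parameters()
        if not opt_exclude_frozen
        else [p for p in model.parameters() if p.requires_grad]
    ),
    lr=0.1,
    momentum=0,
)
assert len(optimizer.param_groups[0]["params"]) == 3  # 3 of 4 tensors kept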
