Commit c3f6608
Apply manual ruff fixes
1 parent d3e6dbd

3 files changed, 24 insertions(+), 9 deletions(-)

Diff for: k_diffusion/gns.py (+2 -2)

@@ -5,8 +5,8 @@ class DDPGradientStatsHook:
     def __init__(self, ddp_module):
         try:
             ddp_module.register_comm_hook(self, self._hook_fn)
-        except AttributeError:
-            raise ValueError('DDPGradientStatsHook does not support non-DDP wrapped modules')
+        except AttributeError as ae:
+            raise ValueError('DDPGradientStatsHook does not support non-DDP wrapped modules') from ae
         self._clear_state()
 
     def _clear_state(self):
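
This change addresses ruff's B904 rule (raise-without-from inside an except block): re-raising with `raise ... from` records the caught exception as `__cause__`, so the traceback reads "The above exception was the direct cause of the following exception" instead of the misleading "During handling of the above exception, another exception occurred". A minimal sketch, not from this repo and with hypothetical names, of the behavior the fix preserves:

import traceback

def lookup(table, key):
    try:
        return table[key]
    except KeyError as ke:
        # Chaining keeps the original KeyError visible as the direct cause.
        raise ValueError(f'unknown key: {key!r}') from ke

try:
    lookup({'a': 1}, 'b')
except ValueError:
    traceback.print_exc()  # the KeyError appears as the ValueError's cause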

Diff for: k_diffusion/models/axial_rope.py (+7 -1)

@@ -84,7 +84,13 @@ def init(shape):
 
 
 class AxialRoPE(nn.Module):
-    def __init__(self, dim, n_heads, start_index=0, freqs_init=freqs_pixel_log(max_freq=10.0)):
+    def __init__(
+        self,
+        dim,
+        n_heads,
+        start_index=0,
+        freqs_init=freqs_pixel_log(max_freq=10.0),  # noqa: B008
+    ):
         super().__init__()
         self.n_heads = n_heads
         self.start_index = start_index
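
The `# noqa: B008` comment suppresses ruff's B008 rule (function call in a default argument). B008 flags this pattern because Python evaluates default values once, at definition time, not on every call; here the commit keeps the call and silences the check, presumably because a single shared `freqs_init` default is the intended behavior. A minimal sketch, with hypothetical names, of the pitfall B008 normally guards against:

import time

def make_stamp(t=time.time()):  # default computed once, when `def` executes
    return t

first = make_stamp()
time.sleep(0.01)
second = make_stamp()
assert first == second  # both calls reuse the same definition-time value

# The usual fix when per-call evaluation is actually wanted:
def make_stamp_fresh(t=None):
    return time.time() if t is None else t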

Diff for: k_diffusion/utils.py (+15 -6)

@@ -180,8 +180,11 @@ def __init__(self, optimizer, inv_gamma=1., power=1., warmup=0., min_lr=0.,
 
     def get_lr(self):
         if not self._get_lr_called_within_step:
-            warnings.warn("To get the last learning rate computed by the scheduler, "
-                          "please use `get_last_lr()`.")
+            warnings.warn(
+                "To get the last learning rate computed by the scheduler, "
+                "please use `get_last_lr()`.",
+                stacklevel=1,
+            )
 
         return self._get_closed_form_lr()

@@ -221,8 +224,11 @@ def __init__(self, optimizer, num_steps, decay=0.5, warmup=0., min_lr=0.,
 
     def get_lr(self):
         if not self._get_lr_called_within_step:
-            warnings.warn("To get the last learning rate computed by the scheduler, "
-                          "please use `get_last_lr()`.")
+            warnings.warn(
+                "To get the last learning rate computed by the scheduler, "
+                "please use `get_last_lr()`.",
+                stacklevel=1,
+            )
 
         return self._get_closed_form_lr()

@@ -253,8 +259,11 @@ def __init__(self, optimizer, warmup=0., last_epoch=-1, verbose=False):
 
     def get_lr(self):
         if not self._get_lr_called_within_step:
-            warnings.warn("To get the last learning rate computed by the scheduler, "
-                          "please use `get_last_lr()`.")
+            warnings.warn(
+                "To get the last learning rate computed by the scheduler, "
+                "please use `get_last_lr()`.",
+                stacklevel=1,
+            )
 
         return self._get_closed_form_lr()
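
These three hunks address ruff's B028 rule (`warnings.warn` without an explicit `stacklevel`). Passing `stacklevel=1` keeps the default attribution, the `warn()` call site itself, while making the choice explicit. A minimal sketch, with hypothetical names, of what a different `stacklevel` would do:

import warnings

def old_api():
    # stacklevel=2 attributes the warning to old_api()'s caller,
    # usually the line the user actually needs to change.
    warnings.warn('old_api() is deprecated; use new_api()', DeprecationWarning, stacklevel=2)

old_api()  # the reported file/line point at this call, not inside old_api()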
