
Commit e20a566

Fix lint
1 parent 1a99017 commit e20a566

5 files changed (+7, -1 lint pass: 7 lines added, 6 removed)
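Four of the five files drop `typing` names that are no longer referenced, the classic pyflakes/flake8 F401 finding; the fifth wraps a line that had grown past the length limit (E501 in flake8 terms; the repo's actual lint tooling isn't shown in this commit). A minimal sketch of the F401 pattern, with hypothetical names not taken from the commit:

    # Hypothetical module illustrating the unused-import fixes below.
    from typing import Optional, Tuple  # flake8: F401 "'typing.Optional' imported but unused"


    def first(pair: Tuple[int, int]) -> int:
        # Only Tuple is referenced, so Optional gets flagged.
        return pair[0]

    # The fix is to trim the import to the referenced names only:
    # from typing import Tuple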

tests/attr/helpers/conductance_reference.py (+1, -1)

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-from typing import cast, Optional, Tuple, Union
+from typing import cast, Tuple, Union
 
 import numpy as np
 import torch

tests/attr/models/test_pytext.py (+1, -1)

@@ -5,7 +5,7 @@
 import os
 import tempfile
 import unittest
-from typing import Dict, List, NoReturn, Optional
+from typing import Dict, List
 
 import torch

tests/helpers/basic.py (+1, -2)

@@ -2,12 +2,11 @@
 import copy
 import random
 import unittest
-from typing import Callable, List, Tuple, Union
+from typing import Callable
 
 import numpy as np
 import torch
 from captum.log import patch_methods
-from torch import Tensor
 
 
 def deep_copy_args(func: Callable):

tests/robust/test_FGSM.py (+1, -1)

@@ -2,7 +2,7 @@
 from typing import Any, Callable, List, Optional, Tuple, Union
 
 import torch
-from captum._utils.typing import TensorLikeList, TensorOrTupleOfTensorsGeneric
+from captum._utils.typing import TensorOrTupleOfTensorsGeneric
 from captum.robust import FGSM
 from tests.helpers.basic import assertTensorAlmostEqual, BaseTest
 from tests.helpers.basic_models import BasicModel, BasicModel2, BasicModel_MultiLayer

tests/utils/test_sample_gradient.py (+3, -1)

@@ -139,7 +139,9 @@ def test_sample_grads_layer_modules(self) -> None:
                 # So, check that we did calculate sample grads for the desired
                 # layers via the above checking approach.
                 for parameter in module.parameters():
-                    assert not isinstance(parameter.sample_grad, int)  # type: ignore
+                    assert not isinstance(
+                        parameter.sample_grad, int  # type: ignore
+                    )
             else:
                 # For the layers we do not want sample grads for, their
                 # `sample_grad` should still be 0, since they should not have been
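This last wrap is the line-length half of the fix, and it has to keep mypy happy too: a `# type: ignore` suppresses errors only on its own physical line, so when the assert is split, the comment moves onto the line with the flagged attribute access. A hedged, standalone sketch of the pattern (names are illustrative, not from the commit):

    import torch

    # `sample_grad` is attached to parameters dynamically (as captum's
    # sample-gradient utilities do), so mypy flags both lines below;
    # each gets its own `# type: ignore`.
    param = torch.nn.Parameter(torch.zeros(3))
    param.sample_grad = torch.ones(3)  # type: ignore

    # Single-line form (fine while under the line-length limit):
    assert not isinstance(param.sample_grad, int)  # type: ignore

    # Wrapped form, as in the commit: the ignore comment stays on the
    # physical line containing the flagged expression.
    assert not isinstance(
        param.sample_grad, int  # type: ignore
    )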
