Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add stacklevel across captum to satisfy flake8 #1382

Open
wants to merge 9 commits into
base: master
Choose a base branch
from
3 changes: 2 additions & 1 deletion captum/_utils/av.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,8 @@ def _manage_loading_layers(
"Overwriting activations: load_from_disk is set to False. Removing all "
f"activations matching specified parameters {{path: {path}, "
f"model_id: {model_id}, layers: {layers}, identifier: {identifier}}} "
"before generating new activations."
"before generating new activations.",
stacklevel=1,
)
for layer in layers:
files = glob.glob(
Expand Down
18 changes: 9 additions & 9 deletions captum/_utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,17 +73,17 @@ def safe_div(
@typing.overload
# pyre-fixme[43]: The return type of overloaded function `_is_tuple` (`Literal[]`)
# is incompatible with the return type of the implementation (`bool`).
# pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
# pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
# pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
def _is_tuple(inputs: Tensor) -> Literal[False]: ...
def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...


@typing.overload
# pyre-fixme[43]: The return type of overloaded function `_is_tuple` (`Literal[]`)
# is incompatible with the return type of the implementation (`bool`).
# pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
# pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
# pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...
def _is_tuple(inputs: Tensor) -> Literal[False]: ...


def _is_tuple(inputs: Union[Tensor, Tuple[Tensor, ...]]) -> bool:
Expand Down Expand Up @@ -277,7 +277,7 @@ def _format_additional_forward_args(


@overload
def _format_additional_forward_args(
def _format_additional_forward_args( # type: ignore
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
additional_forward_args: Any,
# pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
Expand Down Expand Up @@ -780,10 +780,10 @@ def _reduce_list(
"""
assert len(val_list) > 0, "Cannot reduce empty list!"
if isinstance(val_list[0], torch.Tensor):
# pyre-fixme[16]: `bool` has no attribute `device`.
first_device = val_list[0].device
# pyre-fixme[16]: `bool` has no attribute `to`.
return red_func([elem.to(first_device) for elem in val_list])
first_device = cast(Tensor, val_list[0]).device
return red_func(
[elem.to(first_device) for elem in cast(List[Tensor], val_list)]
)
elif isinstance(val_list[0], bool):
# pyre-fixme[7]: Expected `TupleOrTensorOrBoolGeneric` but got `bool`.
return any(val_list)
Expand Down
25 changes: 13 additions & 12 deletions captum/_utils/gradient.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,33 +159,34 @@ def _neuron_gradients(

@typing.overload
# pyre-fixme[43]: The implementation of `_forward_layer_eval` does not accept all
# possible arguments of overload defined on line `158`.
# possible arguments of overload defined on line `170`.
def _forward_layer_eval(
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
forward_fn: Callable,
inputs: Union[Tensor, Tuple[Tensor, ...]],
layer: Module,
layer: List[Module],
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
additional_forward_args: Any = None,
device_ids: Union[None, List[int]] = None,
attribute_to_layer_input: bool = False,
grad_enabled: bool = False,
) -> Tuple[Tensor, ...]: ...
) -> List[Tuple[Tensor, ...]]: ...


@typing.overload
# pyre-fixme[43]: The implementation of `_forward_layer_eval` does not accept all
# possible arguments of overload defined on line `170`.
# possible arguments of overload defined on line `158`.
def _forward_layer_eval(
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
forward_fn: Callable,
inputs: Union[Tensor, Tuple[Tensor, ...]],
layer: List[Module],
layer: Module,
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
additional_forward_args: Any = None,
device_ids: Union[None, List[int]] = None,
attribute_to_layer_input: bool = False,
grad_enabled: bool = False,
) -> List[Tuple[Tensor, ...]]: ...
) -> Tuple[Tensor, ...]: ...


def _forward_layer_eval(
Expand Down Expand Up @@ -434,34 +435,34 @@ def _forward_layer_eval_with_neuron_grads(

@typing.overload
# pyre-fixme[43]: The implementation of `_forward_layer_eval_with_neuron_grads` does
# not accept all possible arguments of overload defined on line `392`.
# not accept all possible arguments of overload defined on line `405`.
def _forward_layer_eval_with_neuron_grads(
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
forward_fn: Callable,
inputs: Union[Tensor, Tuple[Tensor, ...]],
layer: Module,
layer: List[Module],
additional_forward_args: Any = None,
gradient_neuron_selector: None = None,
grad_enabled: bool = False,
device_ids: Union[None, List[int]] = None,
attribute_to_layer_input: bool = False,
) -> Tuple[Tensor, ...]: ...
) -> List[Tuple[Tensor, ...]]: ...


@typing.overload
# pyre-fixme[43]: The implementation of `_forward_layer_eval_with_neuron_grads` does
# not accept all possible arguments of overload defined on line `405`.
# not accept all possible arguments of overload defined on line `392`.
def _forward_layer_eval_with_neuron_grads(
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
forward_fn: Callable,
inputs: Union[Tensor, Tuple[Tensor, ...]],
layer: List[Module],
layer: Module,
additional_forward_args: Any = None,
gradient_neuron_selector: None = None,
grad_enabled: bool = False,
device_ids: Union[None, List[int]] = None,
attribute_to_layer_input: bool = False,
) -> List[Tuple[Tensor, ...]]: ...
) -> Tuple[Tensor, ...]: ...


def _forward_layer_eval_with_neuron_grads(
Expand Down
Loading
Loading