Add stacklevel across captum to satisfy flake8 #1382
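For context on the fix itself: `warnings.warn` defaults to `stacklevel=1`, which attributes the warning to the `warn()` call inside captum rather than to the caller's code, and flake8 (most likely the flake8-bugbear B028 check) flags calls that leave `stacklevel` implicit. Passing it explicitly, as every `warnings.warn` hunk below does, satisfies the linter without changing runtime behavior. A minimal sketch of what the argument controls (the module and function here are hypothetical, not captum code):

    # mylib.py -- hypothetical module
    import warnings

    def load_activations() -> None:
        # stacklevel=1 (the default) reports the warning against this line;
        # stacklevel=2 would report it against the caller of load_activations().
        warnings.warn("activations will be overwritten", stacklevel=1)

    load_activations()  # warning is attributed inside mylib.py, not here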

Closed · wants to merge 2 commits
3 changes: 2 additions & 1 deletion captum/_utils/av.py
@@ -330,7 +330,8 @@ def _manage_loading_layers(
                 "Overwriting activations: load_from_disk is set to False. Removing all "
                 f"activations matching specified parameters {{path: {path}, "
                 f"model_id: {model_id}, layers: {layers}, identifier: {identifier}}} "
-                "before generating new activations."
+                "before generating new activations.",
+                stacklevel=1,
             )
             for layer in layers:
                 files = glob.glob(
18 changes: 9 additions & 9 deletions captum/_utils/common.py
@@ -73,17 +73,17 @@ def safe_div(
 @typing.overload
 # pyre-fixme[43]: The return type of overloaded function `_is_tuple` (`Literal[]`)
 # is incompatible with the return type of the implementation (`bool`).
-# pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
+# pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
 # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-def _is_tuple(inputs: Tensor) -> Literal[False]: ...
+def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...


 @typing.overload
 # pyre-fixme[43]: The return type of overloaded function `_is_tuple` (`Literal[]`)
 # is incompatible with the return type of the implementation (`bool`).
-# pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
+# pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
 # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...
+def _is_tuple(inputs: Tensor) -> Literal[False]: ...


 def _is_tuple(inputs: Union[Tensor, Tuple[Tensor, ...]]) -> bool:
@@ -277,7 +277,7 @@ def _format_additional_forward_args(


 @overload
-def _format_additional_forward_args(
+def _format_additional_forward_args(  # type: ignore
     # pyre-fixme[2]: Parameter annotation cannot be `Any`.
     additional_forward_args: Any,
     # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
@@ -780,10 +780,10 @@ def _reduce_list(
     """
     assert len(val_list) > 0, "Cannot reduce empty list!"
     if isinstance(val_list[0], torch.Tensor):
-        # pyre-fixme[16]: `bool` has no attribute `device`.
-        first_device = val_list[0].device
-        # pyre-fixme[16]: `bool` has no attribute `to`.
-        return red_func([elem.to(first_device) for elem in val_list])
+        first_device = cast(Tensor, val_list[0]).device
+        return red_func(
+            [elem.to(first_device) for elem in cast(List[Tensor], val_list)]
+        )
     elif isinstance(val_list[0], bool):
         # pyre-fixme[7]: Expected `TupleOrTensorOrBoolGeneric` but got `bool`.
         return any(val_list)
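The `_reduce_list` hunk swaps two pyre-fixme suppressions for `typing.cast`: after the `isinstance` check the element is known to be a `Tensor`, and `cast` records that for the checker at zero runtime cost. A stripped-down sketch of the pattern (illustrative names, not captum's API):

    from typing import Sequence, Union, cast

    import torch
    from torch import Tensor

    def first_device(val_list: Sequence[Union[Tensor, bool]]) -> torch.device:
        # The checker may still see Union[Tensor, bool] for val_list[0] here,
        # since narrowing of subscript expressions is unreliable across checkers;
        # cast() asserts the narrowed type without any runtime check.
        assert isinstance(val_list[0], torch.Tensor)
        return cast(Tensor, val_list[0]).device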
25 changes: 13 additions & 12 deletions captum/_utils/gradient.py
@@ -159,33 +159,34 @@ def _neuron_gradients(

 @typing.overload
 # pyre-fixme[43]: The implementation of `_forward_layer_eval` does not accept all
-# possible arguments of overload defined on line `158`.
+# possible arguments of overload defined on line `170`.
 def _forward_layer_eval(
     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
     forward_fn: Callable,
     inputs: Union[Tensor, Tuple[Tensor, ...]],
-    layer: Module,
+    layer: List[Module],
     # pyre-fixme[2]: Parameter annotation cannot be `Any`.
     additional_forward_args: Any = None,
     device_ids: Union[None, List[int]] = None,
     attribute_to_layer_input: bool = False,
     grad_enabled: bool = False,
-) -> Tuple[Tensor, ...]: ...
+) -> List[Tuple[Tensor, ...]]: ...


 @typing.overload
 # pyre-fixme[43]: The implementation of `_forward_layer_eval` does not accept all
-# possible arguments of overload defined on line `170`.
+# possible arguments of overload defined on line `158`.
 def _forward_layer_eval(
     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
     forward_fn: Callable,
     inputs: Union[Tensor, Tuple[Tensor, ...]],
-    layer: List[Module],
+    layer: Module,
     # pyre-fixme[2]: Parameter annotation cannot be `Any`.
     additional_forward_args: Any = None,
     device_ids: Union[None, List[int]] = None,
     attribute_to_layer_input: bool = False,
     grad_enabled: bool = False,
-) -> List[Tuple[Tensor, ...]]: ...
+) -> Tuple[Tensor, ...]: ...


 def _forward_layer_eval(
@@ -434,34 +435,34 @@ def _forward_layer_eval_with_neuron_grads(

 @typing.overload
 # pyre-fixme[43]: The implementation of `_forward_layer_eval_with_neuron_grads` does
-# not accept all possible arguments of overload defined on line `392`.
+# not accept all possible arguments of overload defined on line `405`.
 def _forward_layer_eval_with_neuron_grads(
     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
     forward_fn: Callable,
     inputs: Union[Tensor, Tuple[Tensor, ...]],
-    layer: Module,
+    layer: List[Module],
     additional_forward_args: Any = None,
     gradient_neuron_selector: None = None,
     grad_enabled: bool = False,
     device_ids: Union[None, List[int]] = None,
     attribute_to_layer_input: bool = False,
-) -> Tuple[Tensor, ...]: ...
+) -> List[Tuple[Tensor, ...]]: ...


 @typing.overload
 # pyre-fixme[43]: The implementation of `_forward_layer_eval_with_neuron_grads` does
-# not accept all possible arguments of overload defined on line `405`.
+# not accept all possible arguments of overload defined on line `392`.
 def _forward_layer_eval_with_neuron_grads(
     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
     forward_fn: Callable,
     inputs: Union[Tensor, Tuple[Tensor, ...]],
-    layer: List[Module],
+    layer: Module,
     additional_forward_args: Any = None,
     gradient_neuron_selector: None = None,
     grad_enabled: bool = False,
     device_ids: Union[None, List[int]] = None,
     attribute_to_layer_input: bool = False,
-) -> List[Tuple[Tensor, ...]]: ...
+) -> Tuple[Tensor, ...]: ...


 def _forward_layer_eval_with_neuron_grads(
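Both `gradient.py` hunks only swap the order of the two overloads (and update the line numbers quoted in the pyre-fixme comments to match): type checkers match overloads top to bottom, and the `List[Module]` signature, which returns a list of tuples, now comes before the single-`Module` one. A condensed sketch of the shape of these overloads (an illustrative function, not the real `_forward_layer_eval`):

    from typing import List, Tuple, Union, overload

    from torch import Tensor
    from torch.nn import Module

    @overload
    def layer_eval(layer: List[Module]) -> List[Tuple[Tensor, ...]]: ...
    @overload
    def layer_eval(layer: Module) -> Tuple[Tensor, ...]: ...

    def layer_eval(
        layer: Union[Module, List[Module]],
    ) -> Union[Tuple[Tensor, ...], List[Tuple[Tensor, ...]]]:
        # One layer in -> one tuple out; a list of layers in -> a list of tuples.
        layers = layer if isinstance(layer, list) else [layer]
        outs = [tuple(p.detach() for p in m.parameters()) for m in layers]
        return outs if isinstance(layer, list) else outs[0]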
3 changes: 2 additions & 1 deletion captum/_utils/progress.py
@@ -214,7 +214,8 @@ def progress(
         warnings.warn(
             "Tried to show progress with tqdm "
             "but tqdm is not installed. "
-            "Fall back to simply print out the progress."
+            "Fall back to simply print out the progress.",
+            stacklevel=1,
         )
         return SimpleProgress(
             iterable, desc=desc, total=total, file=file, mininterval=mininterval
42 changes: 22 additions & 20 deletions captum/attr/_core/deep_lift.py
@@ -118,36 +118,37 @@ def __init__(

     @typing.overload
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `120`.
+    # arguments of overload defined on line `131`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: BaselineType = None,
         target: TargetType = None,
         # pyre-fixme[2]: Parameter annotation cannot be `Any`.
         additional_forward_args: Any = None,
-        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
-        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
+        *,
+        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[False] = False,
+        return_convergence_delta: Literal[True],
         custom_attribution_func: Union[None, Callable[..., Tuple[Tensor, ...]]] = None,
-    ) -> TensorOrTupleOfTensorsGeneric: ...
+    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...

     @typing.overload
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `131`.
+    # arguments of overload defined on line `120`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: BaselineType = None,
         target: TargetType = None,
         # pyre-fixme[2]: Parameter annotation cannot be `Any`.
         additional_forward_args: Any = None,
-        *,
-        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
+        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
+        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[True],
+        return_convergence_delta: Literal[False] = False,
         custom_attribution_func: Union[None, Callable[..., Tuple[Tensor, ...]]] = None,
-    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...
+    ) -> TensorOrTupleOfTensorsGeneric: ...

     @log_usage()
     def attribute(  # type: ignore
@@ -636,7 +637,7 @@ def __init__(self, model: Module, multiply_by_inputs: bool = True) -> None:
     # DeepLiftShap.attribute, so we ignore typing here
     @typing.overload  # type: ignore
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `584`.
+    # arguments of overload defined on line `597`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
@@ -646,30 +647,31 @@ def attribute(
         target: TargetType = None,
         # pyre-fixme[2]: Parameter annotation cannot be `Any`.
         additional_forward_args: Any = None,
-        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
-        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
+        *,
+        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[False] = False,
+        return_convergence_delta: Literal[True],
         custom_attribution_func: Union[None, Callable[..., Tuple[Tensor, ...]]] = None,
-    ) -> TensorOrTupleOfTensorsGeneric: ...
+    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...

     @typing.overload
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `597`.
+    # arguments of overload defined on line `584`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: Union[
             TensorOrTupleOfTensorsGeneric, Callable[..., TensorOrTupleOfTensorsGeneric]
         ],
         target: TargetType = None,
         # pyre-fixme[2]: Parameter annotation cannot be `Any`.
         additional_forward_args: Any = None,
-        *,
-        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
+        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
+        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[True],
+        return_convergence_delta: Literal[False] = False,
         custom_attribution_func: Union[None, Callable[..., Tuple[Tensor, ...]]] = None,
-    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...
+    ) -> TensorOrTupleOfTensorsGeneric: ...

     @log_usage()
     def attribute(  # type: ignore
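The `attribute` overload pairs in `deep_lift.py` (and below in `integrated_gradients.py`) all follow one recipe: the `Literal[True]` overload loses its default, gains a keyword-only `*` marker, and moves first, while the `Literal[False] = False` overload covers calls that omit the flag. The payoff is that the checker infers the tuple return type only when the caller passes `return_convergence_delta=True`. A self-contained sketch of the pattern (toy computation, not DeepLift):

    from typing import Literal, Tuple, Union, overload

    from torch import Tensor

    @overload
    def attribute(x: Tensor, *, return_convergence_delta: Literal[True]) -> Tuple[Tensor, Tensor]: ...
    @overload
    def attribute(x: Tensor, return_convergence_delta: Literal[False] = False) -> Tensor: ...

    def attribute(
        x: Tensor, return_convergence_delta: bool = False
    ) -> Union[Tensor, Tuple[Tensor, Tensor]]:
        attr = x * 2.0  # stand-in for the real attribution computation
        if return_convergence_delta:
            return attr, attr.sum()  # (attributions, convergence delta)
        return attr

The keyword-only `*` is what lets the first overload require the flag without a default; the pre-existing `# type: ignore` on the implementations covers the remaining overload/implementation mismatches, such as passing the flag positionally.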
3 changes: 2 additions & 1 deletion captum/attr/_core/guided_backprop_deconvnet.py
@@ -72,7 +72,8 @@ def attribute(
         # set hooks for overriding ReLU gradients
         warnings.warn(
             "Setting backward hooks on ReLU activations."
-            "The hooks will be removed after the attribution is finished"
+            "The hooks will be removed after the attribution is finished",
+            stacklevel=1,
         )
         try:
             self.model.apply(self._register_hooks)
3 changes: 2 additions & 1 deletion captum/attr/_core/guided_grad_cam.py
@@ -225,7 +225,8 @@ def attribute(
                 warnings.warn(
                     "Couldn't appropriately interpolate GradCAM attributions for some "
                     "input tensors, returning empty tensor for corresponding "
-                    "attributions."
+                    "attributions.",
+                    stacklevel=1,
                 )
                 output_attr.append(torch.empty(0))

21 changes: 11 additions & 10 deletions captum/attr/_core/integrated_gradients.py
@@ -81,7 +81,7 @@ def __init__(
     # a tuple with both attributions and deltas.
     @typing.overload
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `82`.
+    # arguments of overload defined on line `95`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
@@ -92,29 +92,30 @@ def attribute(
         n_steps: int = 50,
         method: str = "gausslegendre",
         internal_batch_size: Union[None, int] = None,
-        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
-        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
+        *,
+        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[False] = False,
-    ) -> TensorOrTupleOfTensorsGeneric: ...
+        return_convergence_delta: Literal[True],
+    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...

     @typing.overload
     # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    # arguments of overload defined on line `95`.
+    # arguments of overload defined on line `82`.
     def attribute(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: BaselineType = None,
         target: TargetType = None,
         # pyre-fixme[2]: Parameter annotation cannot be `Any`.
         additional_forward_args: Any = None,
         n_steps: int = 50,
         method: str = "gausslegendre",
         internal_batch_size: Union[None, int] = None,
-        *,
-        # pyre-fixme[31]: Expression `Literal[True]` is not a valid type.
+        # pyre-fixme[9]: return_convergence_delta has type `Literal[]`; used as `bool`.
+        # pyre-fixme[31]: Expression `Literal[False]` is not a valid type.
         # pyre-fixme[24]: Non-generic type `typing.Literal` cannot take parameters.
-        return_convergence_delta: Literal[True],
-    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...
+        return_convergence_delta: Literal[False] = False,
+    ) -> TensorOrTupleOfTensorsGeneric: ...

     @log_usage()
     def attribute(  # type: ignore
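The `integrated_gradients.py` reordering is the same transformation once more. From the caller's side nothing changes at runtime; the overloads only steer what the type checker infers at each call site. A usage sketch (assumes `model` and `inputs` are already defined, so it is not runnable as-is):

    from captum.attr import IntegratedGradients

    ig = IntegratedGradients(model)
    # Without the flag: the Literal[False] overload applies, and a plain
    # attribution (tensor or tuple of tensors) is inferred.
    attrs = ig.attribute(inputs, target=0)
    # With the flag: the Literal[True] overload applies, and an
    # (attributions, delta) pair is inferred.
    attrs, delta = ig.attribute(inputs, target=0, return_convergence_delta=True)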
6 changes: 4 additions & 2 deletions captum/attr/_core/layer/layer_activation.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3

 # pyre-strict
-from typing import Any, Callable, List, Tuple, Union
+from typing import Any, Callable, cast, List, Tuple, Union

 import torch
 from captum._utils.common import _format_output
@@ -128,7 +128,9 @@ def attribute(
             attribute_to_layer_input=attribute_to_layer_input,
         )
         if isinstance(self.layer, Module):
-            return _format_output(len(layer_eval) > 1, layer_eval)
+            return _format_output(
+                len(layer_eval) > 1, cast(Tuple[Tensor, ...], layer_eval)
+            )
         else:
             return [
                 # pyre-fixme[6]: For 2nd argument expected `Tuple[Tensor, ...]` but