Fix pyre errors in DeepLift #1391

Closed · 16 commits
4 changes: 4 additions & 0 deletions captum/_utils/common.py
@@ -86,6 +86,10 @@ def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...
 def _is_tuple(inputs: Tensor) -> Literal[False]: ...
 
 
+@typing.overload
+def _is_tuple(inputs: TensorOrTupleOfTensorsGeneric) -> bool: ...
+
+
 def _is_tuple(inputs: Union[Tensor, Tuple[Tensor, ...]]) -> bool:
     return isinstance(inputs, tuple)
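
The added third overload is the actual fix: call sites that pass a TensorOrTupleOfTensorsGeneric now match an overload directly instead of failing to unify with either Tensor or Tuple[Tensor, ...]. A minimal self-contained sketch of the pattern (the names mirror captum/_utils/common.py; the caller is hypothetical):

import typing
from typing import Literal, Tuple, TypeVar, Union

from torch import Tensor

TensorOrTupleOfTensorsGeneric = TypeVar(
    "TensorOrTupleOfTensorsGeneric", Tensor, Tuple[Tensor, ...]
)


@typing.overload
def _is_tuple(inputs: Tuple[Tensor, ...]) -> Literal[True]: ...
@typing.overload
def _is_tuple(inputs: Tensor) -> Literal[False]: ...
@typing.overload
def _is_tuple(inputs: TensorOrTupleOfTensorsGeneric) -> bool: ...


def _is_tuple(inputs: Union[Tensor, Tuple[Tensor, ...]]) -> bool:
    return isinstance(inputs, tuple)


def caller(inputs: TensorOrTupleOfTensorsGeneric) -> bool:
    # Inside a generic function, `inputs` is the TypeVar itself, not yet
    # specialized to Tensor or Tuple[Tensor, ...]; without the third
    # overload, pyre reported [6]: expected `Tensor` at calls like this.
    return _is_tuple(inputs)

Note the implementation signature stays a plain Union; only the overload set changed.
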
16 changes: 1 addition & 15 deletions captum/_utils/typing.py
@@ -2,25 +2,11 @@
 
 # pyre-strict
 
-from typing import (
-    List,
-    Optional,
-    overload,
-    Protocol,
-    Tuple,
-    TYPE_CHECKING,
-    TypeVar,
-    Union,
-)
+from typing import List, Literal, Optional, overload, Protocol, Tuple, TypeVar, Union
 
 from torch import Tensor
 from torch.nn import Module
 
-if TYPE_CHECKING:
-    from typing import Literal
-else:
-    Literal = {True: bool, False: bool, (True, False): bool, "pt": str}
-
 TensorOrTupleOfTensorsGeneric = TypeVar(
     "TensorOrTupleOfTensorsGeneric", Tensor, Tuple[Tensor, ...]
 )
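
For context on the deleted branch: the dict was a pre-3.8 shim that let subscriptions like Literal[True] evaluate at runtime without crashing, while type checkers saw the real Literal under TYPE_CHECKING. Assuming a Python 3.8+ floor (PEP 586 put Literal into typing), the direct import now serves both sides. A two-line demonstration of what the shim did and what replaces it:

# Before (runtime): `Literal` was this dict, so `Literal[True]` evaluated to
# `bool` via plain item lookup -- enough not to crash, but meaningless as a type.
Literal = {True: bool, False: bool, (True, False): bool, "pt": str}
assert Literal[True] is bool

# After: one real import serves the type checker and the runtime alike
# (available since Python 3.8 via PEP 586).
from typing import Literal  # noqa: E402
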
209 changes: 61 additions & 148 deletions captum/attr/_core/deep_lift.py

Large diffs are not rendered by default.

44 changes: 13 additions & 31 deletions captum/attr/_core/feature_ablation.py
@@ -47,8 +47,9 @@ class FeatureAblation(PerturbationAttribution):
     first dimension (i.e. a feature mask requires to be applied to all inputs).
     """
 
-    # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
-    def __init__(self, forward_func: Callable) -> None:
+    def __init__(
+        self, forward_func: Callable[..., Union[int, float, Tensor, Future[Tensor]]]
+    ) -> None:
         r"""
         Args:
 
@@ -74,8 +75,7 @@ def attribute(
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: BaselineType = None,
         target: TargetType = None,
-        # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-        additional_forward_args: Any = None,
+        additional_forward_args: object = None,
         feature_mask: Union[None, Tensor, Tuple[Tensor, ...]] = None,
         perturbations_per_eval: int = 1,
         show_progress: bool = False,
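
Two substitutions recur throughout this file and the next: bare Callable (pyre [24]) gains an explicit parameter list and return union, and additional_forward_args moves from Any to object. A short sketch of what each buys; run_forward here is a hypothetical stand-in, not Captum's helper:

from typing import Callable, Union

from torch import Tensor
from torch.futures import Future

# Spelling out the return union documents what a forward_func may legally
# produce, instead of the bare `Callable` pyre rejects.
ForwardFunc = Callable[..., Union[int, float, Tensor, Future[Tensor]]]


def run_forward(forward_func: ForwardFunc, additional_forward_args: object) -> None:
    # `object` is the safe top type: it can be stored and passed along, but
    # any operation on it forces an explicit isinstance narrow first --
    # exactly the discipline `Any` was letting code skip.
    if isinstance(additional_forward_args, tuple):
        forward_func(*additional_forward_args)
    elif additional_forward_args is not None:
        forward_func(additional_forward_args)
    else:
        forward_func()
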
@@ -261,17 +261,13 @@ def attribute(
         """
         # Keeps track whether original input is a tuple or not before
         # converting it into a tuple.
-        # pyre-fixme[6]: For 1st argument expected `Tensor` but got
-        # `TensorOrTupleOfTensorsGeneric`.
         is_inputs_tuple = _is_tuple(inputs)
 
         formatted_inputs, baselines = _format_input_baseline(inputs, baselines)
         formatted_additional_forward_args = _format_additional_forward_args(
             additional_forward_args
         )
         num_examples = formatted_inputs[0].shape[0]
-        # pyre-fixme[6]: For 2nd argument expected `Tuple[Tensor, ...]` but got
-        # `TensorOrTupleOfTensorsGeneric`.
         formatted_feature_mask = _format_feature_mask(feature_mask, formatted_inputs)
 
         assert (
@@ -384,8 +380,6 @@ def attribute(
         # pyre-fixme[7]: Expected `Variable[TensorOrTupleOfTensorsGeneric <:
         # [Tensor, typing.Tuple[Tensor, ...]]]`
         # but got `Union[Tensor, typing.Tuple[Tensor, ...]]`.
-        # pyre-fixme[6]: In call `FeatureAblation._generate_result`,
-        # for 3rd positional argument, expected `bool` but got `Literal[]`.
         return self._generate_result(total_attrib, weights, is_inputs_tuple)  # type: ignore  # noqa: E501 line too long
 
     def _initial_eval_to_processed_initial_eval_fut(
@@ -414,8 +408,7 @@ def attribute_future(
         inputs: TensorOrTupleOfTensorsGeneric,
         baselines: BaselineType = None,
         target: TargetType = None,
-        # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-        additional_forward_args: Any = None,
+        additional_forward_args: object = None,
         feature_mask: Union[None, Tensor, Tuple[Tensor, ...]] = None,
         perturbations_per_eval: int = 1,
         show_progress: bool = False,
@@ -428,8 +421,6 @@ def attribute_future(
 
         # Keeps track whether original input is a tuple or not before
         # converting it into a tuple.
-        # pyre-fixme[6]: For 1st argument expected `Tensor` but got
-        # `TensorOrTupleOfTensorsGeneric`.
         is_inputs_tuple = _is_tuple(inputs)
         formatted_inputs, baselines = _format_input_baseline(inputs, baselines)
         formatted_additional_forward_args = _format_additional_forward_args(
@@ -660,13 +651,11 @@ def _eval_fut_to_ablated_out_fut(
             ) from e
         return result
 
-    # pyre-fixme[3]: Return type must be specified as type that does not contain `Any`
     def _ith_input_ablation_generator(
         self,
         i: int,
         inputs: TensorOrTupleOfTensorsGeneric,
-        # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-        additional_args: Any,
+        additional_args: object,
         target: TargetType,
         baselines: BaselineType,
         input_mask: Union[None, Tensor, Tuple[Tensor, ...]],
@@ -675,7 +664,7 @@ def _ith_input_ablation_generator(
     ) -> Generator[
         Tuple[
             Tuple[Tensor, ...],
-            Any,
+            object,
             TargetType,
             Tensor,
         ],
@@ -705,10 +694,9 @@ def _ith_input_ablation_generator(
         perturbations_per_eval = min(perturbations_per_eval, num_features)
         baseline = baselines[i] if isinstance(baselines, tuple) else baselines
         if isinstance(baseline, torch.Tensor):
-            # pyre-fixme[58]: `+` is not supported for operand types `Tuple[int]`
-            # and `Size`.
-            baseline = baseline.reshape((1,) + baseline.shape)
+            baseline = baseline.reshape((1,) + tuple(baseline.shape))
 
+        additional_args_repeated: object
         if perturbations_per_eval > 1:
             # Repeat features and additional args for batch size.
             all_features_repeated = [
@@ -727,6 +715,7 @@ def _ith_input_ablation_generator(
             target_repeated = target
 
         num_features_processed = min_feature
+        current_additional_args: object
         while num_features_processed < num_features:
             current_num_ablated_features = min(
                 perturbations_per_eval, num_features - num_features_processed
@@ -762,9 +751,7 @@ def _ith_input_ablation_generator(
             # dimension of this tensor.
             current_reshaped = current_features[i].reshape(
                 (current_num_ablated_features, -1)
-                # pyre-fixme[58]: `+` is not supported for operand types
-                # `Tuple[int, int]` and `Size`.
-                + current_features[i].shape[1:]
+                + tuple(current_features[i].shape[1:])
             )
 
             ablated_features, current_mask = self._construct_ablated_input(
@@ -780,10 +767,7 @@ def _ith_input_ablation_generator(
             # (current_num_ablated_features * num_examples, inputs[i].shape[1:]),
             # which can be provided to the model as input.
             current_features[i] = ablated_features.reshape(
-                (-1,)
-                # pyre-fixme[58]: `+` is not supported for operand types
-                # `Tuple[int]` and `Size`.
-                + ablated_features.shape[2:]
+                (-1,) + tuple(ablated_features.shape[2:])
            )
            yield tuple(
                current_features
@@ -818,9 +802,7 @@ def _construct_ablated_input(
         thus counted towards ablations for that feature) and 0s otherwise.
         """
         current_mask = torch.stack(
-            # pyre-fixme[6]: For 1st argument expected `Union[List[Tensor],
-            # Tuple[Tensor, ...]]` but got `List[Union[bool, Tensor]]`.
-            [input_mask == j for j in range(start_feature, end_feature)],  # type: ignore  # noqa: E501 line too long
+            cast(List[Tensor], [input_mask == j for j in range(start_feature, end_feature)]),  # type: ignore  # noqa: E501 line too long
             dim=0,
         ).long()
         current_mask = current_mask.to(expanded_input.device)
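
The pyre-fixme[58] and [6] removals above change only annotations, not behavior: torch.Size is already a tuple subclass, and input_mask == j already yields a Tensor. A standalone sketch of both appeasements (shapes chosen arbitrarily):

from typing import List, cast

import torch
from torch import Tensor

x = torch.zeros(6, 3, 2)

# torch.Size concatenates with plain tuples at runtime, but pyre rejects
# `Tuple[int] + Size` ([58]); wrapping in `tuple(...)` gives it an operand
# type it accepts, with identical results.
reshaped = x.reshape((1,) + tuple(x.shape))        # shape (1, 6, 3, 2)
flattened = x.reshape((-1,) + tuple(x.shape[2:]))  # shape (18, 2)

# Pyre widens `mask == j` to `Union[bool, Tensor]`, so the comprehension is
# cast to `List[Tensor]` before torch.stack ([6]). Each row is a 0/1
# indicator for one feature id.
mask = torch.tensor([0, 1, 1, 2, 0, 2])
current_mask = torch.stack(
    cast(List[Tensor], [mask == j for j in range(0, 3)]), dim=0
).long()  # shape (3, 6)
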
12 changes: 4 additions & 8 deletions captum/attr/_core/feature_permutation.py
@@ -74,10 +74,8 @@ class FeaturePermutation(FeatureAblation):
 
     def __init__(
         self,
-        # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
-        forward_func: Callable,
-        # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
-        perm_func: Callable = _permute_feature,
+        forward_func: Callable[..., Union[int, float, Tensor, Future[Tensor]]],
+        perm_func: Callable[[Tensor, Tensor], Tensor] = _permute_feature,
     ) -> None:
         r"""
         Args:
@@ -101,8 +99,7 @@ def attribute(  # type: ignore
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         target: TargetType = None,
-        # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-        additional_forward_args: Any = None,
+        additional_forward_args: object = None,
         feature_mask: Union[None, TensorOrTupleOfTensorsGeneric] = None,
         perturbations_per_eval: int = 1,
         show_progress: bool = False,
@@ -283,8 +280,7 @@ def attribute_future(
         self,
         inputs: TensorOrTupleOfTensorsGeneric,
         target: TargetType = None,
-        # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-        additional_forward_args: Any = None,
+        additional_forward_args: object = None,
         feature_mask: Union[None, TensorOrTupleOfTensorsGeneric] = None,
         perturbations_per_eval: int = 1,
         show_progress: bool = False,
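
The new perm_func annotation, Callable[[Tensor, Tensor], Tensor], pins down the contract a custom permutation function must satisfy: it receives the input tensor and a feature mask and returns the permuted tensor. A hypothetical example that type-checks against it (roll_permute is not part of Captum):

import torch
from torch import Tensor


def roll_permute(x: Tensor, feature_mask: Tensor) -> Tensor:
    # Roll the batch by one position instead of shuffling randomly; where
    # feature_mask is set, take the rolled values, elsewhere keep x.
    rolled = torch.roll(x, shifts=1, dims=0)
    return torch.where(feature_mask.bool(), rolled, x)

With the annotation in place, FeaturePermutation(forward_func, perm_func=roll_permute) type-checks, while a function with the wrong arity or return type is rejected statically instead of failing inside attribute.
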