From 16aec5cda7d94230285e046d9f74d37e95033251 Mon Sep 17 00:00:00 2001
From: "Pyre Bot Jr."
Date: Wed, 16 Oct 2024 17:18:04 -0700
Subject: [PATCH] Add type error suppressions for upcoming upgrade

Reviewed By: MaggieMoss

Differential Revision: D64503973
---
 captum/attr/_core/deep_lift.py                     |  3 ++
 captum/attr/_core/noise_tunnel.py                  |  2 ++
 captum/metrics/_core/infidelity.py                 |  3 ++
 captum/metrics/_core/sensitivity.py                |  2 ++
 .../_core/test_tracin_k_most_influential.py        | 31 +++++++++++++++++++
 .../influence/_core/test_tracin_regression.py      | 13 ++++++++
 .../_core/test_tracin_self_influence.py            | 12 +++++++
 7 files changed, 66 insertions(+)

diff --git a/captum/attr/_core/deep_lift.py b/captum/attr/_core/deep_lift.py
index e03437fb3..eca75f1d5 100644
--- a/captum/attr/_core/deep_lift.py
+++ b/captum/attr/_core/deep_lift.py
@@ -487,6 +487,9 @@ def _backward_hook(
             "network.".format(module)
         )
 
+    # pyre-fixme[6]: For 1st argument expected `Type[Union[ELU, LeakyReLU, ReLU,
+    #  Sigmoid, Softmax, Softplus, Tanh, MaxPool1d, MaxPool2d, MaxPool3d]]` but got
+    #  `Type[Module]`.
     multipliers = SUPPORTED_NON_LINEAR[type(module)](
         module,
         module.input,
diff --git a/captum/attr/_core/noise_tunnel.py b/captum/attr/_core/noise_tunnel.py
index eb34eda85..bc9d2b88f 100644
--- a/captum/attr/_core/noise_tunnel.py
+++ b/captum/attr/_core/noise_tunnel.py
@@ -261,6 +261,8 @@ def update_sum_attribution_and_sq(
                     else sum_attribution_sq[i] + current_attribution_sq
                 )
 
+        # pyre-fixme[53]: Captured variable `attr_func` is not annotated.
+        # pyre-fixme[53]: Captured variable `is_inputs_tuple` is not annotated.
         def compute_partial_attribution(
             inputs_with_noise_partition: Tuple[Tensor, ...],
             # pyre-fixme[2]: Parameter annotation cannot be `Any`.
diff --git a/captum/metrics/_core/infidelity.py b/captum/metrics/_core/infidelity.py
index 001fd1805..29115a719 100644
--- a/captum/metrics/_core/infidelity.py
+++ b/captum/metrics/_core/infidelity.py
@@ -422,6 +422,8 @@ def _generate_perturbations(
         """
 
     # pyre-fixme[3]: Return type must be annotated.
+    # pyre-fixme[53]: Captured variable `baselines_expanded` is not annotated.
+    # pyre-fixme[53]: Captured variable `inputs_expanded` is not annotated.
     def call_perturb_func():
         r""" """
         baselines_pert = None
@@ -485,6 +487,7 @@ def _validate_inputs_and_perturbations(
             is: {}"""
         ).format(perturb[0].shape, input_perturbed[0].shape)
 
+    # pyre-fixme[53]: Captured variable `bsz` is not annotated.
     def _next_infidelity_tensors(
         current_n_perturb_samples: int,
     ) -> Union[Tuple[Tensor], Tuple[Tensor, Tensor, Tensor]]:
diff --git a/captum/metrics/_core/sensitivity.py b/captum/metrics/_core/sensitivity.py
index 381dbbc44..b4b0190ea 100644
--- a/captum/metrics/_core/sensitivity.py
+++ b/captum/metrics/_core/sensitivity.py
@@ -232,6 +232,8 @@ def max_values(input_tnsr: Tensor) -> Tensor:
     # pyre-fixme[33]: Given annotation cannot be `Any`.
     kwargs_copy: Any = None
 
+    # pyre-fixme[53]: Captured variable `bsz` is not annotated.
+    # pyre-fixme[53]: Captured variable `expl_inputs` is not annotated.
     def _next_sensitivity_max(current_n_perturb_samples: int) -> Tensor:
         inputs_perturbed = _generate_perturbations(current_n_perturb_samples)
diff --git a/tests/influence/_core/test_tracin_k_most_influential.py b/tests/influence/_core/test_tracin_k_most_influential.py
index 08224a60e..c41f8514d 100644
--- a/tests/influence/_core/test_tracin_k_most_influential.py
+++ b/tests/influence/_core/test_tracin_k_most_influential.py
@@ -24,10 +24,18 @@ class TestTracInGetKMostInfluential(BaseTest):
     param_list: List[
         Tuple[str, DataInfluenceConstructor, bool, bool, int, int, str, bool]
     ] = []
+    # pyre-fixme[16]: `type` has no attribute `batch_size`.
+    # pyre-fixme[16]: `type` has no attribute `k`.
     for batch_size, k in [(4, 7), (7, 4), (40, 5), (5, 40), (40, 45)]:
+        # pyre-fixme[16]: `type` has no attribute `unpack_inputs`.
         for unpack_inputs in [True, False]:
+            # pyre-fixme[16]: `type` has no attribute `proponents`.
             for proponents in [True, False]:
+                # pyre-fixme[16]: `type` has no attribute `gpu_setting`.
                 for gpu_setting in GPU_SETTING_LIST:
+                    # pyre-fixme[16]: `type` has no attribute `reduction`.
+                    # pyre-fixme[16]: `type` has no attribute `constr`.
+                    # pyre-fixme[16]: `type` has no attribute `aggregate`.
                     for reduction, constr, aggregate in [
                         (
                             "none",
@@ -50,6 +58,9 @@ class TestTracInGetKMostInfluential(BaseTest):
                                 name="linear2",
                                 layers=(
                                     ["module.linear2"]
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `gpu_setting`.
                                     if gpu_setting == "cuda_data_parallel"
                                     else ["linear2"]
                                 ),
@@ -58,19 +69,39 @@ class TestTracInGetKMostInfluential(BaseTest):
                         ),
                     ]:
                         if not (
+                            # pyre-fixme[16]: `TestTracInGetKMostInfluential` has no
+                            #  attribute `constr`.
                             "sample_wise_grads_per_batch" in constr.kwargs
                             and constr.kwargs["sample_wise_grads_per_batch"]
                             and is_gpu(gpu_setting)
                         ):
                             param_list.append(
                                 (
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `reduction`.
                                     reduction,
                                     constr,
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `unpack_inputs`.
                                     unpack_inputs,
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `proponents`.
                                     proponents,
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `batch_size`.
                                     batch_size,
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `k`.
                                     k,
                                     gpu_setting,
+                                    # pyre-fixme[16]:
+                                    #  `TestTracInGetKMostInfluential` has no attribute
+                                    #  `aggregate`.
                                     aggregate,
                                 )
                             )
diff --git a/tests/influence/_core/test_tracin_regression.py b/tests/influence/_core/test_tracin_regression.py
index 960909169..c09c5e2d6 100644
--- a/tests/influence/_core/test_tracin_regression.py
+++ b/tests/influence/_core/test_tracin_regression.py
@@ -60,8 +60,13 @@ def _test_tracin_regression_setup(
     param_list: List[Tuple[Optional[str], DataInfluenceConstructor, str, int, bool]] = (
         []
     )
+    # pyre-fixme[16]: `type` has no attribute `use_gpu`.
     for use_gpu in use_gpu_list:
+        # pyre-fixme[16]: `type` has no attribute `dim`.
         for dim in [1, 20]:
+            # pyre-fixme[16]: `type` has no attribute `mode`.
+            # pyre-fixme[16]: `type` has no attribute `reduction`.
+            # pyre-fixme[16]: `type` has no attribute `constructor`.
             for mode, reduction, constructor in [
                 (
                     "check_idx",
@@ -74,6 +79,8 @@ def _test_tracin_regression_setup(
                 DataInfluenceConstructor(
                     TracInCP,
                     name="TracInCP_fc1",
+                    # pyre-fixme[16]: `TestTracInRegression` has no attribute
+                    #  `use_gpu`.
                     layers=["module.fc1"] if use_gpu else ["fc1"],
                 ),
             ),
@@ -138,7 +145,13 @@ def _test_tracin_regression_setup(
                 ),  # add a test where `duplicate_loss_fn` is True
             ),
         ]:
+            # pyre-fixme[16]: `TestTracInRegression` has no attribute `mode`.
             if not (mode == "sample_wise_trick" and use_gpu):
+                # pyre-fixme[16]: `TestTracInRegression` has no attribute
+                #  `reduction`.
+                # pyre-fixme[16]: `TestTracInRegression` has no attribute
+                #  `constructor`.
+                # pyre-fixme[16]: `TestTracInRegression` has no attribute `dim`.
                 param_list.append((reduction, constructor, mode, dim, use_gpu))
 
     # pyre-fixme[56]: Pyre was not able to infer the type of argument
diff --git a/tests/influence/_core/test_tracin_self_influence.py b/tests/influence/_core/test_tracin_self_influence.py
index f93e6c74f..78f87637f 100644
--- a/tests/influence/_core/test_tracin_self_influence.py
+++ b/tests/influence/_core/test_tracin_self_influence.py
@@ -31,8 +31,12 @@ class TestTracInSelfInfluence(BaseTest):
 
     # add tests for `TracInCPBase` implementations
+    # pyre-fixme[16]: `type` has no attribute `unpack_inputs`.
     for unpack_inputs in [True, False]:
+        # pyre-fixme[16]: `type` has no attribute `gpu_setting`.
         for gpu_setting in GPU_SETTING_LIST:
+            # pyre-fixme[16]: `type` has no attribute `reduction`.
+            # pyre-fixme[16]: `type` has no attribute `constructor`.
             for reduction, constructor in [
                 (
                     "none",
@@ -45,6 +49,8 @@ class TestTracInSelfInfluence(BaseTest):
                             name="TracInCP_linear1",
                             layers=(
                                 ["module.linear1"]
+                                # pyre-fixme[16]: `TestTracInSelfInfluence` has no
+                                #  attribute `gpu_setting`.
                                 if gpu_setting == "cuda_data_parallel"
                                 else ["linear1"]
                             ),
@@ -84,11 +90,17 @@ class TestTracInSelfInfluence(BaseTest):
                 ),
             ]:
                 if not (
+                    # pyre-fixme[16]: `TestTracInSelfInfluence` has no attribute
+                    #  `constructor`.
                     "sample_wise_grads_per_batch" in constructor.kwargs
                     and constructor.kwargs["sample_wise_grads_per_batch"]
                     and is_gpu(gpu_setting)
                 ):
                     param_list.append(
+                        # pyre-fixme[16]: `TestTracInSelfInfluence` has no attribute
+                        #  `reduction`.
+                        # pyre-fixme[16]: `TestTracInSelfInfluence` has no attribute
+                        #  `unpack_inputs`.
                         (reduction, constructor, unpack_inputs, gpu_setting)
                     )