The following issues were found:

tools/autograd/gen_autograd_functions.py
29 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

#  Functions.h/cpp: subclasses of autograd::Node
#  python_functions.h/cpp: Python bindings for the above classes
#
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,

Reported by Pylint.
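
Suggested fix (a minimal sketch): assuming Pylint is run from the repository root so that `tools` is an importable package, the relative import can be written in the absolute form already used for the other `tools.*` imports in this file. Where a relative import is legitimate and expected to stay (as in torch/nn/modules/instancenorm.py later in this report), the report instead indicates Pylint was invoked on the file outside its package, and the invocation or `init-hook` is what needs adjusting.

# Sketch: absolute import of the sibling module; assumes the repository root
# is on sys.path when the code generator runs, as it already is for the
# tools.codegen.* imports below.
from tools.autograd.gen_inplace_or_view_type import VIEW_FUNCTIONS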

TODO: This is probably not exhaustive, but it's a start
Error

Line: 292 Column: 3

# VIEW_FUNCTIONS are not traceable because they use as_strided, which
# has an untraceable backwards, see
# https://github.com/pytorch/pytorch/issues/4250
# TODO: This is probably not exhaustive, but it's a start
UNTRACEABLE_FUNCTIONS = VIEW_FUNCTIONS

def gen_autograd_functions_lib(
    out: str,
    differentiability_infos: Sequence[DifferentiabilityInfo],

Reported by Pylint.

Cell variable fname defined in loop
Error

Line: 316 Column: 79

    for suffix in ['.h', '.cpp']:
        fname = file_basename + suffix
        fm.write_with_template(fname, fname, lambda: {
            'generated_comment': '@' + f'generated from {fm.template_dir}/' + fname,
            'autograd_function_declarations': declarations,
            'autograd_function_definitions': definitions,
        })

def gen_autograd_functions_python(

Reported by Pylint.
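
Suggested fix (a sketch, assuming `write_with_template` invokes the callback immediately, as it appears to): bind the loop variable as a default argument so the lambda no longer closes over `fname`.

for suffix in ['.h', '.cpp']:
    fname = file_basename + suffix
    fm.write_with_template(fname, fname, lambda fname=fname: {
        'generated_comment': '@' + f'generated from {fm.template_dir}/' + fname,
        'autograd_function_declarations': declarations,
        'autograd_function_definitions': definitions,
    })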

Redefining built-in 'type'
Error

Line: 377 Column: 9

    def save_var(var: SavedAttribute, is_output: bool) -> None:
        name = var.nctype.name
        type = var.nctype.type
        should_append_getsetdef = True
        should_append_raw_getsetdef = False

        if type == BaseCType(tensorT) or type == OptionalCType(BaseCType(tensorT)) or \
                type == MutRefCType(OptionalCType(BaseCType(tensorT))) or \

Reported by Pylint.
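
Suggested fix (a sketch): `type` is only a local inside `save_var`, so renaming it removes the shadowing; the comparisons below then read `saved_type` instead of `type`.

def save_var(var: SavedAttribute, is_output: bool) -> None:
    name = var.nctype.name
    saved_type = var.nctype.type  # renamed from `type`
    should_append_getsetdef = True
    should_append_raw_getsetdef = False

    if saved_type == BaseCType(tensorT) or saved_type == OptionalCType(BaseCType(tensorT)) or \
            saved_type == MutRefCType(OptionalCType(BaseCType(tensorT))):
        ...  # body unchanged apart from the rename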

Missing module docstring
Error

Line: 1 Column: 1

# Generates C++ autograd functions for the derivatives of ATen operations
#
# This writes two files:
#  Functions.h/cpp: subclasses of autograd::Node
#  python_functions.h/cpp: Python bindings for the above classes
#
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

Reported by Pylint.
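
Suggested fix (a sketch): C0114 is usually resolved by promoting the existing header comment to a module docstring.

"""Generates C++ autograd functions for the derivatives of ATen operations.

This writes two files:
  Functions.h/cpp: subclasses of autograd::Node
  python_functions.h/cpp: Python bindings for the above classes
"""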

standard import "from typing import List, Sequence, Tuple" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 9 Column: 1

#
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,

Reported by Pylint.
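
Suggested fix (a sketch): this report and the four that follow are all resolved by one reordering of the header, with the standard-library import first, the `tools.*` imports next, and the relative import last.

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

from .gen_inplace_or_view_type import VIEW_FUNCTIONS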

third party import "from tools.codegen.api.autograd import Derivative, DifferentiabilityInfo, SavedAttribute, uses_retain_variables, uses_single_grad" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 11 Column: 1

              
from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)

            

Reported by Pylint.

third party import "from tools.codegen.api.types import Binding, BaseCType, OptionalCType, tensorT, intT, doubleT, scalarT, stringT, boolT, intArrayRefT, tensorListT, MutRefCType, ListCType, ArrayRefCType" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 14 Column: 1

              from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

            

Reported by Pylint.

third party import "from tools.codegen.code_template import CodeTemplate" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 17 Column: 1

              from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

FUNCTION_DECLARATION = CodeTemplate("""\
struct TORCH_API ${op} : public ${superclass} {

            

Reported by Pylint.

third party import "from tools.codegen.gen import FileManager" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 18 Column: 1

                                                   doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

FUNCTION_DECLARATION = CodeTemplate("""\
struct TORCH_API ${op} : public ${superclass} {
  using ${superclass}::${superclass};

            

Reported by Pylint.

torch/jit/__init__.py
29 issues
Access to a protected member _c of a client class
Error

Line: 67 Column: 37

        have a LiteScriptModule and want to get the currently present
        list of ops call _export_operator_list instead.
    """
    return torch._C._export_opnames(m._c)


# torch.jit.Error
Error = torch._C.JITException
set_module(Error, "torch.jit")

Reported by Pylint.
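
Suggested fix (a sketch): `torch._C` is PyTorch's own C++ binding module, so these accesses are deliberate; a targeted pragma is the usual remedy. The wrapper name below is hypothetical, and the same per-line pragma (or a file-level `# pylint: disable=protected-access`) covers the other `torch._C` reports in this file and in test/jit/test_backend_nnapi.py.

import torch

def _export_opnames_sketch(m):  # hypothetical stand-in for the flagged function
    return torch._C._export_opnames(m._c)  # pylint: disable=protected-access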

Access to a protected member _export_opnames of a client class
Error

Line: 67 Column: 12

                      have a LiteScriptModule and want to get the currently present
        list of ops call _export_operator_list instead.
    """
    return torch._C._export_opnames(m._c)


# torch.jit.Error
Error = torch._C.JITException
set_module(Error, "torch.jit")

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 67 Column: 12

                      have a LiteScriptModule and want to get the currently present
        list of ops call _export_operator_list instead.
    """
    return torch._C._export_opnames(m._c)


# torch.jit.Error
Error = torch._C.JITException
set_module(Error, "torch.jit")

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 71 Column: 9

              

# torch.jit.Error
Error = torch._C.JITException
set_module(Error, "torch.jit")
# This is not perfect but works in common cases
Error.__name__ = "Error"
Error.__qualname__ = "Error"


            

Reported by Pylint.

Unused argument 'the_type'
Error

Line: 78 Column: 14

Error.__qualname__ = "Error"

# for use in python if using annotate
def annotate(the_type, the_value):
    """
    This method is a pass-through function that returns `the_value`, used to hint TorchScript
    compiler the type of `the_value`. It is a no-op when running outside of TorchScript.

    Though TorchScript can infer correct type for most Python expressions, there are some cases where

Reported by Pylint.
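
Suggested fix (a sketch): `the_type` is consumed by the TorchScript compiler rather than at runtime, so it cannot be dropped or renamed without changing the public API; suppressing the warning on the signature is the usual choice.

def annotate(the_type, the_value):  # pylint: disable=unused-argument
    """Pass-through that returns `the_value`; `the_type` is read by TorchScript."""
    return the_value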

Redefining built-in 'isinstance'
Error

Line: 147 Column: 1


# for torch.jit.isinstance
def isinstance(obj, target_type):
    """
    This function provides for conatiner type refinement in TorchScript. It can refine
    parameterized containers of the List, Dict, Tuple, and Optional types. E.g. ``List[str]``,
    ``Dict[str, List[torch.Tensor]]``, ``Optional[Tuple[int,str,int]]``. It can also
    refine basic types such as bools and ints that are available in TorchScript.

Reported by Pylint.
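
Suggested fix (a sketch): the shadowing is intentional here, since `torch.jit.isinstance` is the public API name, so a scoped suppression is preferable to a rename.

def isinstance(obj, target_type):  # pylint: disable=redefined-builtin
    """Container type refinement for TorchScript (body unchanged)."""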

Access to a protected member _C of a client class
Error

Line: 194 Column: 32

              # Graph class, so mypy checks need to be skipped.
@contextmanager
def _hide_source_ranges() -> Iterator[None]:
    old_enable_source_ranges = torch._C.Graph.global_print_source_ranges  # type: ignore[attr-defined]
    try:
        torch._C.Graph.set_global_print_source_ranges(False)  # type: ignore[attr-defined]
        yield
    finally:
        torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 196 Column: 9

              def _hide_source_ranges() -> Iterator[None]:
    old_enable_source_ranges = torch._C.Graph.global_print_source_ranges  # type: ignore[attr-defined]
    try:
        torch._C.Graph.set_global_print_source_ranges(False)  # type: ignore[attr-defined]
        yield
    finally:
        torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]



            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 199 Column: 9

                      torch._C.Graph.set_global_print_source_ranges(False)  # type: ignore[attr-defined]
        yield
    finally:
        torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]


if not torch._C._jit_init():
    raise RuntimeError("JIT initialization failed")

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 202 Column: 8

                      torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]


if not torch._C._jit_init():
    raise RuntimeError("JIT initialization failed")

            

Reported by Pylint.

torch/nn/modules/instancenorm.py
29 issues
Attempted relative import beyond top-level package
Error

Line: 3 Column: 1

              from torch import Tensor

from .batchnorm import _LazyNormBase, _NormBase
from .. import functional as F


class _InstanceNorm(_NormBase):
    def __init__(
        self,

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 4 Column: 1

              from torch import Tensor

from .batchnorm import _LazyNormBase, _NormBase
from .. import functional as F


class _InstanceNorm(_NormBase):
    def __init__(
        self,

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 22 Column: 32

        super(_InstanceNorm, self).__init__(
            num_features, eps, momentum, affine, track_running_stats, **factory_kwargs)

    def _check_input_dim(self, input):
        raise NotImplementedError

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        version = local_metadata.get('version', None)

Reported by Pylint.
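
Suggested fix (a sketch): `input` is the established parameter name across `torch.nn` modules and may be passed by keyword, so renaming it would break callers; a suppression is the usual fix, and the same applies to the seven further `input` reports below in this file.

def _check_input_dim(self, input):  # pylint: disable=redefined-builtin
    raise NotImplementedError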

Redefining built-in 'input'
Error

Line: 55 Column: 23

                          state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs)

    def forward(self, input: Tensor) -> Tensor:
        self._check_input_dim(input)
        return F.instance_norm(
            input, self.running_mean, self.running_var, self.weight, self.bias,
            self.training or not self.track_running_stats, self.momentum, self.eps)


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 130 Column: 32

                      >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() == 2:
            raise ValueError(
                'InstanceNorm1d returns 0-filled tensor to 2D tensor.'
                'This is because InstanceNorm1d reshapes inputs to'
                '(1, N * C, ...) from (N, C,...) and this makes'

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 169 Column: 32

              
    cls_to_become = InstanceNorm1d  # type: ignore[assignment]

    def _check_input_dim(self, input):
        if input.dim() == 2:
            raise ValueError(
                'InstanceNorm1d returns 0-filled tensor to 2D tensor.'
                'This is because InstanceNorm1d reshapes inputs to'
                '(1, N * C, ...) from (N, C,...) and this makes'

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 250 Column: 32

                      >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() != 4:
            raise ValueError('expected 4D input (got {}D input)'
                             .format(input.dim()))



            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 282 Column: 32

              
    cls_to_become = InstanceNorm2d  # type: ignore[assignment]

    def _check_input_dim(self, input):
        if input.dim() != 4:
            raise ValueError("expected 4D input (got {}D input)".format(input.dim()))


class InstanceNorm3d(_InstanceNorm):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 355 Column: 32

                      >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() != 5:
            raise ValueError('expected 5D input (got {}D input)'
                             .format(input.dim()))



            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 387 Column: 32

              
    cls_to_become = InstanceNorm3d  # type: ignore[assignment]

    def _check_input_dim(self, input):
        if input.dim() != 5:
            raise ValueError("expected 5D input (got {}D input)".format(input.dim()))

            

Reported by Pylint.

caffe2/python/operator_test/learning_rate_adaption_op_test.py
29 issues
Unable to import 'hypothesis'
Error

Line: 10 Column: 1

import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial

from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestLearningRateAdaption(serial.SerializedTestCase):

Reported by Pylint.
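
Suggested fix: import-error reports like this usually mean the linting environment simply lacks the dependency; installing `hypothesis` (and, for the later reports, `torch`) into that environment, or listing the modules under `ignored-modules` in the Pylint configuration, is the clean resolution. A per-line pragma is the fallback, sketched here:

from hypothesis import given, settings  # pylint: disable=import-error
import hypothesis.strategies as st  # pylint: disable=import-error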

Unable to import 'hypothesis.strategies'
Error

Line: 11 Column: 1

              import caffe2.python.serialized_test.serialized_test_util as serial

from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestLearningRateAdaption(serial.SerializedTestCase):
    @given(inputs=hu.tensors(n=2),

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 24 Column: 58

                         **hu.gcs_cpu_only)
    @settings(deadline=None, max_examples=50)
    def test_learning_rate_adaption_op_normalization(self, inputs, lr, lr_alpha,
                                                     gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(
            'LearningRateAdaption',

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 59 Column: 76

                                         allow_nan=False, allow_infinity=False),
           **hu.gcs_cpu_only)
    def test_learning_rate_adaption_op_without_normalization(self, inputs, lr,
                                                             lr_alpha, gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(
            'LearningRateAdaption',

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              




from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial


            

Reported by Pylint.

Missing class docstring
Error

Line: 15 Column: 1

              import numpy as np


class TestLearningRateAdaption(serial.SerializedTestCase):
    @given(inputs=hu.tensors(n=2),
           lr=st.floats(min_value=0.01, max_value=0.99,
                        allow_nan=False, allow_infinity=False),
           lr_alpha=st.floats(min_value=0.01, max_value=0.99,
                           allow_nan=False, allow_infinity=False),

            

Reported by Pylint.

Too many arguments (6/5)
Error

Line: 23 Column: 5

                                         allow_nan=False, allow_infinity=False),
           **hu.gcs_cpu_only)
    @settings(deadline=None, max_examples=50)
    def test_learning_rate_adaption_op_normalization(self, inputs, lr, lr_alpha,
                                                     gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

Reported by Pylint.

Argument name "lr" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                                         allow_nan=False, allow_infinity=False),
           **hu.gcs_cpu_only)
    @settings(deadline=None, max_examples=50)
    def test_learning_rate_adaption_op_normalization(self, inputs, lr, lr_alpha,
                                                     gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

Reported by Pylint.
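
Suggested fix (a sketch): the R0913 and C0103 reports all point at the same signature, which is dictated by the `@given(...)` decorator (hypothesis injects `inputs`, `lr`, `lr_alpha`, `gc`, `dc` by name), so the signature cannot easily change. A scoped suppression is shown below; raising `max-args` or adding `lr`/`gc`/`dc` to `good-names` in the Pylint configuration are equivalent project-wide alternatives.

def test_learning_rate_adaption_op_normalization(  # pylint: disable=too-many-arguments,invalid-name
        self, inputs, lr, lr_alpha, gc, dc):
    ...  # test body unchanged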

Argument name "gc" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                                         allow_nan=False, allow_infinity=False),
           **hu.gcs_cpu_only)
    @settings(deadline=None, max_examples=50)
    def test_learning_rate_adaption_op_normalization(self, inputs, lr, lr_alpha,
                                                     gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

            

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                                         allow_nan=False, allow_infinity=False),
           **hu.gcs_cpu_only)
    @settings(deadline=None, max_examples=50)
    def test_learning_rate_adaption_op_normalization(self, inputs, lr, lr_alpha,
                                                     gc, dc):
        grad, effgrad = inputs
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

            

Reported by Pylint.

benchmarks/operator_benchmark/benchmark_pytorch.py
29 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

              import time
import json
import torch
import benchmark_cpp_extension  # noqa: F401


"""PyTorch performance microbenchmarks.

This module contains PyTorch-specific functionalities for performance

            

Reported by Pylint.

Unable to import 'benchmark_cpp_extension'
Error

Line: 4 Column: 1

              import time
import json
import torch
import benchmark_cpp_extension  # noqa: F401


"""PyTorch performance microbenchmarks.

This module contains PyTorch-specific functionalities for performance

            

Reported by Pylint.

Instance of 'TorchBenchmarkBase' has no '_auto_set_counter' member
Error

Line: 49 Column: 13

            self._num_inputs_require_grads += 1
            return True
        else:
            self._auto_set_counter += 1
            return (self._pass_count == self._auto_set_counter)

    def extract_inputs_tuple(self):
        self.inputs_tuple = tuple(self.inputs.values())


Reported by Pylint.
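
Suggested fix (a sketch): E1101 means Pylint cannot see `_auto_set_counter` assigned anywhere on the class. If the attribute genuinely belongs to `TorchBenchmarkBase`, initialising it in `__init__` fixes this and the later attribute-defined-outside-__init__ reports; if it is injected by the benchmark harness instead, a scoped `# pylint: disable=no-member` is the honest alternative. The `__init__` below is abbreviated and partly assumed.

def __init__(self):  # keep whatever arguments the real class takes
    # ...existing initialisation...
    self._num_inputs_require_grads = 0
    self._pass_count = 0
    self._auto_set_counter = 0   # makes the attribute visible to Pylint
    self._is_backward = False    # also addresses the defined-outside-__init__ report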

Instance of 'TorchBenchmarkBase' has no '_auto_set_counter' member
Error

Line: 50 Column: 41

                          return True
        else:
            self._auto_set_counter += 1
            return (self._pass_count == self._auto_set_counter)

    def extract_inputs_tuple(self):
        self.inputs_tuple = tuple(self.inputs.values())

    @torch.jit.export

            

Reported by Pylint.

Unused import benchmark_cpp_extension
Error

Line: 4 Column: 1

import time
import json
import torch
import benchmark_cpp_extension  # noqa: F401


"""PyTorch performance microbenchmarks.

This module contains PyTorch-specific functionalities for performance

Reported by Pylint.
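
Suggested fix (a sketch): `# noqa: F401` only silences flake8, so Pylint needs its own pragma; the import itself is presumably kept for its side effect of registering the C++ benchmark ops. One commonly used combination:

import benchmark_cpp_extension  # noqa: F401  # pylint: disable=unused-import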

String statement has no effect
Error

Line: 7 Column: 1

import benchmark_cpp_extension  # noqa: F401


"""PyTorch performance microbenchmarks.

This module contains PyTorch-specific functionalities for performance
microbenchmarks.
"""


Reported by Pylint.
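
Suggested fix (a sketch): the triple-quoted description sits after the imports, so Python treats it as a no-op expression rather than the module docstring. Moving it to the very top resolves this report and the missing-docstring one at the same time.

"""PyTorch performance microbenchmarks.

This module contains PyTorch-specific functionalities for performance
microbenchmarks.
"""
import time
import json
import torch
import benchmark_cpp_extension  # noqa: F401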

Attribute '_is_backward' defined outside __init__
Error

Line: 27 Column: 9

                      self._num_inputs_require_grads = 0

    def _set_backward_test(self, is_backward):
        self._is_backward = is_backward

    def auto_set(self):
        """ This is used to automatically set the require_grad for the backward patch.
            It is implemented based on two counters. One counter to save the number of
            times init has been called. The other counter to save the number of times

            

Reported by Pylint.

Attribute 'inputs_tuple' defined outside __init__
Error

Line: 53 Column: 9

                          return (self._pass_count == self._auto_set_counter)

    def extract_inputs_tuple(self):
        self.inputs_tuple = tuple(self.inputs.values())

    @torch.jit.export
    def get_inputs(self):
        # Need to convert the inputs to tuple outside of JIT so that
        # JIT can infer the size of the inputs.

            

Reported by Pylint.

Access to a protected member _consume of a client class
Error

Line: 71 Column: 13

                  def forward_consume(self, iters: int):
        #  _consume is used to avoid the dead-code-elimination optimization
        for _ in range(iters):
            torch.ops.operator_benchmark._consume(self.forward_impl())

    def module_name(self):
        """ this is used to label the operator being benchmarked
        """
        if self.user_given_name:

            

Reported by Pylint.

Unused argument 'print_per_iter'
Error

Line: 126 Column: 41

                      scripted_op_bench = torch.jit.script(self.op_bench)
        return scripted_op_bench.forward_consume

    def run_jit_forward(self, num_runs, print_per_iter=False, cuda_sync=False):
        """ Run the forward path of an op with JIT mode
        """
        if self._jit_forward_graph is None:
            self._jit_forward_graph = self._generate_jit_forward_graph()
        self._jit_forward_graph(num_runs)

            

Reported by Pylint.

benchmarks/operator_benchmark/benchmark_all_other_test.py
29 issues
Unable to import 'pt'
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused sum_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused split_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused remainder_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused instancenorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused interpolate_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused groupnorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused layernorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused hardswish_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

Unused hardsigmoid_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    add_test, as_strided_test, batchnorm_test, binary_test, cat_test,
    channel_shuffle_test, chunk_test, conv_test, diag_test, embeddingbag_test,
    fill_test, gather_test, linear_test, matmul_test, nan_to_num_test, pool_test,
    softmax_test, hardsigmoid_test, hardswish_test, layernorm_test,
    groupnorm_test, interpolate_test, instancenorm_test, remainder_test,
    split_test, sum_test, tensor_to_test
)

            

Reported by Pylint.

test/jit/test_backend_nnapi.py
29 issues
Unable to import 'torch'
Error

Line: 5 Column: 1

              import sys
import unittest

import torch
import torch._C
from pathlib import Path
from test_nnapi import TestNNAPI
from torch.testing._internal.common_utils import TEST_WITH_ASAN


            

Reported by Pylint.

Unable to import 'torch._C'
Error

Line: 6 Column: 1

              import unittest

import torch
import torch._C
from pathlib import Path
from test_nnapi import TestNNAPI
from torch.testing._internal.common_utils import TEST_WITH_ASAN

# Make the helper files in test/ importable

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 9 Column: 1

              import torch._C
from pathlib import Path
from test_nnapi import TestNNAPI
from torch.testing._internal.common_utils import TEST_WITH_ASAN

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)


            

Reported by Pylint.

String statement has no effect
Error

Line: 22 Column: 1

                      "instead."
    )

"""
Unit Tests for Nnapi backend with delegate
Inherits most tests from TestNNAPI, which loads Android NNAPI models
without the delegate API.
"""
# First skip is needed for IS_WINDOWS or IS_MACOS to skip the tests.

            

Reported by Pylint.

Access to a protected member _jit_to_backend of a client class
Error

Line: 53 Column: 16

                  # Override
    def call_lowering_to_nnapi(self, traced_module, args):
        compile_spec = {"forward": {"inputs": args}}
        return torch._C._jit_to_backend("nnapi", traced_module, compile_spec)

    def test_tensor_input(self):
        # Lower a simple module
        args = torch.tensor([[1.0, -1.0, 2.0, -2.0]]).unsqueeze(-1).unsqueeze(-1)
        module = torch.nn.PReLU()

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 53 Column: 16

                  # Override
    def call_lowering_to_nnapi(self, traced_module, args):
        compile_spec = {"forward": {"inputs": args}}
        return torch._C._jit_to_backend("nnapi", traced_module, compile_spec)

    def test_tensor_input(self):
        # Lower a simple module
        args = torch.tensor([[1.0, -1.0, 2.0, -2.0]]).unsqueeze(-1).unsqueeze(-1)
        module = torch.nn.PReLU()

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 81 Column: 13

                      # No forward key
        compile_spec = {"backward": {"inputs": args}}
        with self.assertRaisesRegex(RuntimeError, "method_compile_spec does not contain the \"forward\" key." + errorMsgTail):
            torch._C._jit_to_backend("nnapi", traced, compile_spec)

        # No dictionary under the forward key
        compile_spec = {"forward": 1}
        with self.assertRaisesRegex(RuntimeError,
                                    "method_compile_spec does not contain a dictionary with an \"inputs\" key, "

            

Reported by Pylint.

Access to a protected member _jit_to_backend of a client class
Error

Line: 81 Column: 13

                      # No forward key
        compile_spec = {"backward": {"inputs": args}}
        with self.assertRaisesRegex(RuntimeError, "method_compile_spec does not contain the \"forward\" key." + errorMsgTail):
            torch._C._jit_to_backend("nnapi", traced, compile_spec)

        # No dictionary under the forward key
        compile_spec = {"forward": 1}
        with self.assertRaisesRegex(RuntimeError,
                                    "method_compile_spec does not contain a dictionary with an \"inputs\" key, "

            

Reported by Pylint.

Access to a protected member _jit_to_backend of a client class
Error

Line: 89 Column: 13

                                                  "method_compile_spec does not contain a dictionary with an \"inputs\" key, "
                                    "under it's \"forward\" key."
                                    + errorMsgTail):
            torch._C._jit_to_backend("nnapi", traced, compile_spec)

        # No inputs key (in the dictionary under the forward key)
        compile_spec = {"forward": {"not inputs": args}}
        with self.assertRaisesRegex(RuntimeError,
                                    "method_compile_spec does not contain a dictionary with an \"inputs\" key, "

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 89 Column: 13

                                                  "method_compile_spec does not contain a dictionary with an \"inputs\" key, "
                                    "under it's \"forward\" key."
                                    + errorMsgTail):
            torch._C._jit_to_backend("nnapi", traced, compile_spec)

        # No inputs key (in the dictionary under the forward key)
        compile_spec = {"forward": {"not inputs": args}}
        with self.assertRaisesRegex(RuntimeError,
                                    "method_compile_spec does not contain a dictionary with an \"inputs\" key, "

            

Reported by Pylint.

benchmarks/functional_autograd_benchmark/functional_autograd_benchmark.py
29 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
from torch.autograd import functional

import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models

            

Reported by Pylint.

Unable to import 'torch.autograd'
Error

Line: 2 Column: 1

              import torch
from torch.autograd import functional

import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models

            

Reported by Pylint.

Unused variable 'res'
Error

Line: 78 Column: 9

    func = getattr(functional, task)

    if v is not None:
        res = func(model, inp, v=v, strict=True)
    else:
        res = func(model, inp, strict=True)

def run_model(model_getter: GetterType, args: Any, task: str) -> List[float]:
    if args.gpu == -1:

Reported by Pylint.
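
Suggested fix (a sketch): `run_once` only exercises the call for timing, so the binding can simply be dropped (or named `_`). The same applies to the unused loop index reported next, which can become `for _ in range(args.num_iters):`.

if v is not None:
    func(model, inp, v=v, strict=True)
else:
    func(model, inp, strict=True)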

Unused variable 'it'
Error

Line: 100 Column: 9

                  run_once(model, inp, task, v)

    elapsed = []
    for it in range(args.num_iters):
        do_sync()
        start = time.time()
        run_once(model, inp, task, v)
        do_sync()
        elapsed.append(time.time() - start)

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
from torch.autograd import functional

import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models

            

Reported by Pylint.

standard import "import time" should be placed before "import torch"
Error

Line: 4 Column: 1

              import torch
from torch.autograd import functional

import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models

            

Reported by Pylint.

standard import "from argparse import ArgumentParser" should be placed before "import torch"
Error

Line: 5 Column: 1

              from torch.autograd import functional

import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models
import vision_models

            

Reported by Pylint.

standard import "from collections import defaultdict" should be placed before "import torch"
Error

Line: 6 Column: 1

              
import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models
import vision_models
import audio_text_models

            

Reported by Pylint.

standard import "from typing import NamedTuple, Callable, List, Any" should be placed before "import torch"
Error

Line: 7 Column: 1

              import time
from argparse import ArgumentParser
from collections import defaultdict
from typing import NamedTuple, Callable, List, Any

import ppl_models
import vision_models
import audio_text_models


            

Reported by Pylint.

Line too long (101/100)
Error

Line: 38 Column: 1

              # - getter: a function to get the model. It takes as input the device on which the model
#     will run. It should return the forward function and the parameters (Tensors) used as
#     input for the forward function. Note that the forward must *not* have any side effect.
# - tasks: the list of recommended tasks that can run in a reasonable amount of time with this model.
# - unsupported: the list of tasks that this model cannot run.
class ModelDef(NamedTuple):
    name: str
    getter: GetterType
    tasks: List[str]

            

Reported by Pylint.

torch/fx/passes/split_module.py
29 issues
Unused argument 'root_m'
Error

Line: 28 Column: 5

              # Creates subgraphs out of main graph
def split_module(
    m: GraphModule,
    root_m: torch.nn.Module,
    split_callback: Callable[[torch.fx.node.Node], int],
):
    partitions: Dict[str, Partition] = {}
    orig_nodes: Dict[str, torch.fx.node.Node] = {}


            

Reported by Pylint.

TODO currently placeholders/parameters aren't put into random partitions,
Error

Line: 54 Column: 3

                  for node in m.graph.nodes:
        orig_nodes[node.name] = node

        # TODO currently placeholders/parameters aren't put into random partitions,
        # rather they're added to the graphs where they are used down below
        if node.op in ["placeholder", "get_attr"]:
            continue
        if node.op == 'output':
            torch.fx.graph.map_arg(node.args[0], lambda n: record_cross_partition_use(n, None))

            

Reported by Pylint.

Access to a protected member _fx_partition of a client class
Error

Line: 69 Column: 9

                          partitions[partition_name] = partition = Partition(partition_name)

        partition.node_names.append(node.name)
        node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies

            

Reported by Pylint.

Cell variable node defined in loop
Error

Line: 71 Column: 97

                      partition.node_names.append(node.name)
        node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies
    root_partitions : List[str] = []
    for partition_name, partition in partitions.items():

            

Reported by Pylint.

Cell variable node defined in loop
Error

Line: 72 Column: 99

                      node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies
    root_partitions : List[str] = []
    for partition_name, partition in partitions.items():
        if not len(partition.partitions_dependent_on):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 95 Column: 13

                  # add placeholders to parititons
    for partition_name in sorted_partitions:
        partition = partitions[partition_name]
        for input in partition.inputs:
            placeholder = partition.graph.placeholder(input)
            placeholder.meta = orig_nodes[input].meta.copy()
            partition.environment[orig_nodes[input]] = placeholder

    # Transform nodes and collect targets for partition's submodule

            

Reported by Pylint.

Access to a protected member _fx_partition of a client class
Error

Line: 103 Column: 36

                  # Transform nodes and collect targets for partition's submodule
    for node in m.graph.nodes:
        if hasattr(node, '_fx_partition'):
            partition = partitions[node._fx_partition]

            # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            

Reported by Pylint.

Cell variable environment defined in loop
Error

Line: 107 Column: 74

              
            # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            if node.op not in ['call_module', 'get_attr']:
                target = node.target
            else:

            

Reported by Pylint.

Cell variable environment defined in loop
Error

Line: 108 Column: 78

                          # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            if node.op not in ['call_module', 'get_attr']:
                target = node.target
            else:
                target_atoms = node.target.split('.')

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
from torch.fx.graph_module import GraphModule
from typing import Callable, List, Dict, Any, Optional

class Partition:
    def __init__(self, name: str):
        self.name: str = name
        self.node_names: List[str] = []
        self.inputs: Dict[str, None] = {}

            

Reported by Pylint.

.jenkins/pytorch/create_test_cert.py
29 issues
No value for argument 'backend' in function call
Error

Line: 14 Column: 11


def genrsa(path):
    key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )
    with open(path, "wb") as f:
        f.write(key.private_bytes(

Reported by Pylint.
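
Suggested fix (a sketch): older releases of `cryptography` required an explicit `backend` argument on key generation and certificate signing, while newer releases default it, which is likely why these calls omit it. If the pinned version (or the stubs Pylint sees) still expects the argument, passing `default_backend()` satisfies both; otherwise a per-line `# pylint: disable=no-value-for-parameter` is the alternative.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(
    public_exponent=65537,
    key_size=2048,
    backend=default_backend(),
)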

No value for argument 'backend' in method call
Error

Line: 34 Column: 12

                      x509.NameAttribute(NameOID.LOCALITY_NAME, L),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, O),
    ])
    cert = x509.CertificateBuilder().subject_name(
        subject
    ).issuer_name(
        issuer
    ).public_key(
        key.public_key()

            

Reported by Pylint.

No value for argument 'backend' in method call
Error

Line: 57 Column: 11

              

def create_req(path, C, ST, L, O, key):
    csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
        # Provide various details about who we are.
        x509.NameAttribute(NameOID.COUNTRY_NAME, C),
        x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, ST),
        x509.NameAttribute(NameOID.LOCALITY_NAME, L),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, O),

            

Reported by Pylint.

No value for argument 'backend' in method call
Error

Line: 70 Column: 12

              

def sign_certificate_request(path, csr_cert, ca_cert, private_ca_key):
    cert = x509.CertificateBuilder().subject_name(
        csr_cert.subject
    ).issuer_name(
        ca_cert.subject
    ).public_key(
        csr_cert.public_key()

            

Reported by Pylint.

Redefining name 'cert' from outer scope (line 96)
Error

Line: 34 Column: 5

        x509.NameAttribute(NameOID.LOCALITY_NAME, L),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, O),
    ])
    cert = x509.CertificateBuilder().subject_name(
        subject
    ).issuer_name(
        issuer
    ).public_key(
        key.public_key()

Reported by Pylint.
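
Suggested fix (a sketch): these W0621 reports come from the helpers reusing names (`cert`, `csr`, `ca_cert`) that are also bound at module scope around lines 91-96; giving the locals and parameters distinct names removes the shadowing. Sketched for `sign_certificate_request` (whose signature appears in the reports below), assuming it is called positionally:

def sign_certificate_request(path, csr_cert, issuing_cert, private_ca_key):
    signed = x509.CertificateBuilder().subject_name(
        csr_cert.subject
    ).issuer_name(
        issuing_cert.subject
    ).public_key(
        csr_cert.public_key()
    )
    # ...remaining builder calls, signing, and file writing unchanged,
    # just using `signed` instead of `cert`...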

Redefining name 'csr' from outer scope (line 94)
Error

Line: 57 Column: 5

              

def create_req(path, C, ST, L, O, key):
    csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
        # Provide various details about who we are.
        x509.NameAttribute(NameOID.COUNTRY_NAME, C),
        x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, ST),
        x509.NameAttribute(NameOID.LOCALITY_NAME, L),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, O),

            

Reported by Pylint.

Redefining name 'ca_cert' from outer scope (line 91)
Error

Line: 69 Column: 46

                  return csr


def sign_certificate_request(path, csr_cert, ca_cert, private_ca_key):
    cert = x509.CertificateBuilder().subject_name(
        csr_cert.subject
    ).issuer_name(
        ca_cert.subject
    ).public_key(

            

Reported by Pylint.

Redefining name 'cert' from outer scope (line 96)
Error

Line: 70 Column: 5

              

def sign_certificate_request(path, csr_cert, ca_cert, private_ca_key):
    cert = x509.CertificateBuilder().subject_name(
        csr_cert.subject
    ).issuer_name(
        ca_cert.subject
    ).public_key(
        csr_cert.public_key()

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from datetime import datetime, timedelta
from tempfile import mkdtemp
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import hashes

temp_dir = mkdtemp()

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 13 Column: 1

              print(temp_dir)


def genrsa(path):
    key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )
    with open(path, "wb") as f:

            

Reported by Pylint.