The following issues were found:

test/distributed/elastic/multiprocessing/api_test.py
98 issues
Unable to import 'torch'
Error

Line: 22 Column: 1

              from unittest import mock
from unittest.mock import patch

import torch
import torch.multiprocessing as mp
from torch.distributed.elastic.multiprocessing import ProcessFailure, start_processes
from torch.distributed.elastic.multiprocessing.api import (
    MultiprocessContext,
    SignalException,

            

Reported by Pylint.
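A note on the 'Unable to import' (E0401) findings in this file and the similar ones reported below: they usually mean that the environment running Pylint does not have torch installed, not that the test code itself is broken. Installing the package into the analysis environment is the real fix; where that is not possible, a scoped pragma keeps the noise down. The lines below are a sketch of that workaround, not a change taken from the test file.

    # Sketch only: silence E0401 for imports Pylint cannot resolve in its own
    # environment; installing torch for the linter is the preferred fix.
    import torch  # pylint: disable=import-error
    import torch.multiprocessing as mp  # pylint: disable=import-error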

Unable to import 'torch.multiprocessing'
Error

Line: 23 Column: 1

              from unittest.mock import patch

import torch
import torch.multiprocessing as mp
from torch.distributed.elastic.multiprocessing import ProcessFailure, start_processes
from torch.distributed.elastic.multiprocessing.api import (
    MultiprocessContext,
    SignalException,
    RunProcsResult,

            

Reported by Pylint.

Unable to import 'torch.distributed.elastic.multiprocessing'
Error

Line: 24 Column: 1

              
import torch
import torch.multiprocessing as mp
from torch.distributed.elastic.multiprocessing import ProcessFailure, start_processes
from torch.distributed.elastic.multiprocessing.api import (
    MultiprocessContext,
    SignalException,
    RunProcsResult,
    Std,

            

Reported by Pylint.

Unable to import 'torch.distributed.elastic.multiprocessing.api'
Error

Line: 25 Column: 1

              import torch
import torch.multiprocessing as mp
from torch.distributed.elastic.multiprocessing import ProcessFailure, start_processes
from torch.distributed.elastic.multiprocessing.api import (
    MultiprocessContext,
    SignalException,
    RunProcsResult,
    Std,
    _validate_full_rank,

            

Reported by Pylint.

Unable to import 'torch.distributed.elastic.multiprocessing.errors.error_handler'
Error

Line: 34 Column: 1

                  to_map,
    _wrap,
)
from torch.distributed.elastic.multiprocessing.errors.error_handler import _write_error
from torch.testing._internal.common_utils import (
    NO_MULTIPROCESSING_SPAWN,
    TEST_WITH_ASAN,
    TEST_WITH_DEV_DBG_ASAN,
    TEST_WITH_TSAN,

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 35 Column: 1

                  _wrap,
)
from torch.distributed.elastic.multiprocessing.errors.error_handler import _write_error
from torch.testing._internal.common_utils import (
    NO_MULTIPROCESSING_SPAWN,
    TEST_WITH_ASAN,
    TEST_WITH_DEV_DBG_ASAN,
    TEST_WITH_TSAN,
    IS_IN_CI,

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 45 Column: 1

                  IS_MACOS,
    sandcastle_skip_if,
)
from torch.testing._internal.common_utils import run_tests


class RunProcResultsTest(unittest.TestCase):
    def setUp(self):
        self.test_dir = tempfile.mkdtemp(prefix=f"{self.__class__.__name__}_")

            

Reported by Pylint.

Module 'signal' has no 'Signals' member
Error

Line: 825 Column: 69

                              self._test_zombie_workflow(wait_fn, s)

        def _test_zombie_workflow(
            self, entrypoint: Union[str, Callable], signal_to_send: signal.Signals
        ) -> None:
            mp_queue = mp.get_context("spawn").Queue()
            child_nproc = 2
            ctx = mp.spawn(
                start_processes_zombie_test,

            

Reported by Pylint.
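This finding is a false positive: signal.Signals has been part of the standard library since Python 3.5, but Pylint's static inference sometimes misses enum members created at import time. A quick runtime check confirms the attribute exists; an inline no-member pragma on the annotated line is one way to record that.

    import signal

    # signal.Signals exists at runtime (Python 3.5+), even though Pylint's
    # inference reports E1101 for it here.
    print(isinstance(signal.SIGTERM, signal.Signals))  # True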

Unused argument 'log_mock'
Error

Line: 66 Column: 33

                      self.assertTrue(pr_fail.is_failed())

    @patch("torch.distributed.elastic.multiprocessing.errors.log")
    def test_get_failures(self, log_mock):
        with mock.patch("time.time", side_effect=[3, 2, 1]):
            error_file0 = os.path.join(self.test_dir, "error0.json")
            error_file1 = os.path.join(self.test_dir, "error1.json")
            _write_error(RuntimeError("error 0"), error_file0)
            _write_error(RuntimeError("error 1"), error_file1)

            

Reported by Pylint.
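The 'unused' argument is the mock object injected by the @patch decorator, so it cannot simply be dropped from the signature. Prefixing it with an underscore (or adding a scoped unused-argument pragma) tells Pylint the parameter is intentional. A minimal standalone sketch, not the real test:

    import unittest
    from unittest.mock import patch


    class PatchExample(unittest.TestCase):
        # The decorator injects the mock as a positional argument; the leading
        # underscore marks it as intentionally unused.
        @patch("os.getcwd")
        def test_with_patched_getcwd(self, _getcwd_mock):
            self.assertTrue(True)


    if __name__ == "__main__":
        unittest.main()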

Redefining built-in 'bin'
Error

Line: 179 Column: 1

                  ]


def bin(name: str):
    dir = os.path.dirname(__file__)
    return os.path.join(dir, "bin", name)


def wait_fn(wait_time: int = 300) -> None:

            

Reported by Pylint.
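Shadowing the built-in bin() (and, inside the body, dir()) is straightforward to fix with a rename, since the helper is local to the test module. The replacement names below are hypothetical, not taken from the file:

    import os


    def bin_path(name: str) -> str:
        # 'bin_path' and 'base_dir' avoid shadowing the builtins bin() and dir().
        base_dir = os.path.dirname(__file__)
        return os.path.join(base_dir, "bin", name)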

torch/nn/modules/sparse.py
98 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              from torch import Tensor
from torch.nn.parameter import Parameter

from .module import Module
from .. import functional as F
from .. import init


class Embedding(Module):

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              from torch.nn.parameter import Parameter

from .module import Module
from .. import functional as F
from .. import init


class Embedding(Module):
    r"""A simple lookup table that stores embeddings of a fixed dictionary and size.

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 9 Column: 1

              
from .module import Module
from .. import functional as F
from .. import init


class Embedding(Module):
    r"""A simple lookup table that stores embeddings of a fixed dictionary and size.


            

Reported by Pylint.
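These three findings are usually an analysis-context problem rather than a code problem: when a file inside torch/nn/modules is linted as a standalone script, Pylint cannot see the package root, so every relative import appears to escape the top-level package. Linting the package from the repository root normally clears them. For reference, the absolute equivalents of the flagged imports are sketched below (assuming an installed torch); the source keeps the relative form on purpose.

    # Absolute equivalents of the relative imports flagged above (sketch only).
    from torch.nn.modules.module import Module
    from torch.nn import functional as F
    from torch.nn import init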

Module 'torch' has no 'empty' member
Error

Line: 139 Column: 37

                      self.norm_type = norm_type
        self.scale_grad_by_freq = scale_grad_by_freq
        if _weight is None:
            self.weight = Parameter(torch.empty((num_embeddings, embedding_dim), **factory_kwargs))
            self.reset_parameters()
        else:
            assert list(_weight.shape) == [num_embeddings, embedding_dim], \
                'Shape of weight does not match num_embeddings and embedding_dim'
            self.weight = Parameter(_weight)

            

Reported by Pylint.
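Pylint also struggles to introspect members that torch defines in C extension code, which is where the two "no 'empty' member" reports come from; torch.empty is a real factory function. Pylint's generated-members or extension-pkg options are the usual project-wide remedy, and an inline pragma works for a single call site. A sketch, assuming torch is importable at runtime:

    import torch

    # torch.empty exists at runtime; the pragma records the E1101 false positive.
    weight = torch.empty((4, 8))  # pylint: disable=no-member
    print(weight.shape)  # torch.Size([4, 8])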

Module 'torch' has no 'empty' member
Error

Line: 333 Column: 37

                              padding_idx = self.num_embeddings + padding_idx
        self.padding_idx = padding_idx
        if _weight is None:
            self.weight = Parameter(torch.empty((num_embeddings, embedding_dim), **factory_kwargs))
            self.reset_parameters()
        else:
            assert list(_weight.shape) == [num_embeddings, embedding_dim], \
                'Shape of weight does not match num_embeddings and embedding_dim'
            self.weight = Parameter(_weight)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 157 Column: 23

                          with torch.no_grad():
                self.weight[self.padding_idx].fill_(0)

    def forward(self, input: Tensor) -> Tensor:
        return F.embedding(
            input, self.weight, self.padding_idx, self.max_norm,
            self.norm_type, self.scale_grad_by_freq, self.sparse)

    def extra_repr(self) -> str:

            

Reported by Pylint.
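Here the shadowed builtin is the parameter name input, which is a deliberate part of the public forward() signature; renaming it would break callers that pass the argument by keyword. A scoped pragma is the usual compromise, as in this generic sketch (not the Embedding code itself):

    class LookupSketch:
        """Stand-in for a module whose public API uses the name ``input``."""

        def forward(self, input):  # pylint: disable=redefined-builtin
            # The name is kept for API compatibility; the pragma scopes the
            # waiver to this one definition.
            return input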

Redefining built-in 'input'
Error

Line: 352 Column: 23

                          with torch.no_grad():
                self.weight[self.padding_idx].fill_(0)

    def forward(self, input: Tensor, offsets: Optional[Tensor] = None, per_sample_weights: Optional[Tensor] = None) -> Tensor:
        """Forward pass of EmbeddingBag.

        Args:
            input (Tensor): Tensor containing bags of indices into the embedding matrix.
            offsets (Tensor, optional): Only used when :attr:`input` is 1D. :attr:`offsets` determines

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from typing import Optional

import torch
from torch import Tensor
from torch.nn.parameter import Parameter

from .module import Module
from .. import functional as F
from .. import init

            

Reported by Pylint.
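C0114 only asks for a module-level docstring on the first line of the file; the wording below is illustrative, not a required text:

    """Sparse embedding layers (Embedding, EmbeddingBag) for torch.nn."""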

Too many instance attributes (8/7)
Error

Line: 12 Column: 1

              from .. import init


class Embedding(Module):
    r"""A simple lookup table that stores embeddings of a fixed dictionary and size.

    This module is often used to store word embeddings and retrieve them using indices.
    The input to the module is a list of indices, and the output is the corresponding
    word embeddings.

            

Reported by Pylint.

Line too long (120/100)
Error

Line: 22 Column: 1

                  Args:
        num_embeddings (int): size of the dictionary of embeddings
        embedding_dim (int): the size of each embedding vector
        padding_idx (int, optional): If specified, the entries at :attr:`padding_idx` do not contribute to the gradient;
                                     therefore, the embedding vector at :attr:`padding_idx` is not updated during training,
                                     i.e. it remains as a fixed "pad". For a newly constructed Embedding,
                                     the embedding vector at :attr:`padding_idx` will default to all zeros,
                                     but can be updated to another value to be used as the padding vector.
        max_norm (float, optional): If given, each embedding vector with norm larger than :attr:`max_norm`

            

Reported by Pylint.

test/jit/test_builtins.py
97 issues
Unable to import 'torch'
Error

Line: 7 Column: 1

              import unittest
from typing import Dict, List

import torch
from torch.testing import FileCheck

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

            

Reported by Pylint.

Unable to import 'torch.testing'
Error

Line: 8 Column: 1

              from typing import Dict, List

import torch
from torch.testing import FileCheck

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase, RUN_CUDA

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 13 Column: 1

              # Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase, RUN_CUDA

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

            

Reported by Pylint.

Bad first argument 'Mod' given to super()
Error

Line: 60 Column: 17

                  def test_has_attr_invalid_args(self):
        class Mod(torch.nn.Module):
            def __init__(self):
                super(Mod, self).__init__()
                self.mod = torch.nn.Linear(1, 1)

            def forward(self, name):
                # not allowed, `name` must be static.
                return hasattr(self.mod, name)

            

Reported by Pylint.
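The bad-super-call report appears to be triggered by the second class named Mod defined later in the same test (see the duplicate-class finding that follows), which confuses the name-based check. Python 3's zero-argument super() sidesteps the issue entirely; a generic sketch:

    class Base:
        def __init__(self):
            self.ready = True


    class Mod(Base):
        def __init__(self):
            # Zero-argument super() does not name the class, so it stays
            # correct even if the name Mod is reused elsewhere in the module.
            super().__init__()


    print(Mod().ready)  # True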

class already defined line 58
Error

Line: 70 Column: 9

                      with self.assertRaisesRegexWithHighlight(RuntimeError, "hasattr", "name"):
            torch.jit.script(Mod())

        class Mod(torch.nn.Module):
            def __init__(self):
                super(Mod, self).__init__()

            def forward(self, name):
                # not allowed, `torch.rand` is not a class type

            

Reported by Pylint.

function already defined line 82
Error

Line: 91 Column: 13

              
        with self.assertRaisesRegexWithHighlight(RuntimeError, "undefined value", "a"):
            @torch.jit.script
            def fn(x):
                a = x ** 2
                del a
                return a

        with self.assertRaisesRegexWithHighlight(RuntimeError, "undefined value", "a"):

            

Reported by Pylint.

function already defined line 82
Error

Line: 98 Column: 13

              
        with self.assertRaisesRegexWithHighlight(RuntimeError, "undefined value", "a"):
            @torch.jit.script
            def fn(x):
                a = x ** 2
                if a:
                    del a
                return a


            

Reported by Pylint.

function already defined line 82
Error

Line: 106 Column: 13

              
        with self.assertRaisesRegexWithHighlight(RuntimeError, "undefined value", "b"):
            @torch.jit.script
            def fn(x):
                a = x ** 2
                del b
                return a

    def test_del_multiple_operands(self):

            

Reported by Pylint.

Undefined variable 'b'
Error

Line: 108 Column: 21

                          @torch.jit.script
            def fn(x):
                a = x ** 2
                del b
                return a

    def test_del_multiple_operands(self):
        def fn(x: List[int]) -> List[int]:
            a, b, c = x[0], x[1], x[2]

            

Reported by Pylint.

Instance of 'int' has no 'add' member
Error

Line: 237 Column: 20

                  def test_method_on_number(self):
        def func():
            c = 1
            return c.add(1)
        with self.assertRaisesRegex(RuntimeError, 'object has no attribute or method'):
            torch.jit.script(func)

    # testing implicit conversion of tensors to scalars to match function arguments
    def test_scalar_to_num_conversions(self):

            

Reported by Pylint.

caffe2/python/hypothesis_test_util.py
95 issues
Unable to import 'hypothesis'
Error

Line: 47 Column: 1

              import contextlib
import copy
import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st
import logging
import numpy as np
import os

            

Reported by Pylint.

Unable to import 'hypothesis.extra.numpy'
Error

Line: 48 Column: 1

              import copy
import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st
import logging
import numpy as np
import os
import struct

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 49 Column: 1

              import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st
import logging
import numpy as np
import os
import struct


            

Reported by Pylint.

Redefining name 'dims' from outer scope (line 137)
Error

Line: 160 Column: 12

                  return elems if filter_ is None else elems.filter(filter_)


def arrays(dims, dtype=np.float32, elements=None, unique=False):
    if elements is None:
        elements = elements_of_type(dtype)
    return hypothesis.extra.numpy.arrays(
        dtype,
        dims,

            

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 202 Column: 1

                          elements=st.integers(min_value=0, max_value=2 * size))


def lengths(size, min_segments=None, max_segments=None, **kwargs):
    # First generate number of boarders between segments
    # Then create boarder values and add 0 and size
    # By sorting and computing diff we convert them to lengths of
    # possible 0 value
    if min_segments is None:

            

Reported by Pylint.

Keyword argument before variable positional arguments list in the definition of lengths_tensor function
Error

Line: 251 Column: 1

                  ))


def lengths_tensor(min_segments=None, max_segments=None, *args, **kwargs):
    gen = functools.partial(
        lengths, min_segments=min_segments, max_segments=max_segments)
    return segmented_tensor(*args, segment_generator=gen, **kwargs)



            

Reported by Pylint.
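W1113 warns because a keyword argument placed before *args can silently absorb positional arguments. One possible reordering makes the former defaults keyword-only; it assumes existing callers already pass min_segments and max_segments by keyword, and the function name is a stand-in:

    def lengths_tensor_sketch(*args, min_segments=None, max_segments=None, **kwargs):
        # *args is forwarded untouched; the two knobs are keyword-only, so a
        # stray positional argument can no longer land in min_segments.
        return args, min_segments, max_segments, kwargs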

Redefining name 'device_options' from outer scope (line 306)
Error

Line: 409 Column: 9

              
    def assertDeviceChecks(
        self,
        device_options,
        op,
        inputs,
        outputs_to_check,
        input_device_options=None,
        threshold=0.01

            

Reported by Pylint.

standard import "import contextlib" should be placed before "from caffe2.proto import caffe2_pb2"
Error

Line: 44 Column: 1

              from caffe2.proto import caffe2_pb2
from caffe2.python import (
    workspace, device_checker, gradient_checker, test_util, core)
import contextlib
import copy
import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st

            

Reported by Pylint.

standard import "import copy" should be placed before "from caffe2.proto import caffe2_pb2"
Error

Line: 45 Column: 1

              from caffe2.python import (
    workspace, device_checker, gradient_checker, test_util, core)
import contextlib
import copy
import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st
import logging

            

Reported by Pylint.

standard import "import functools" should be placed before "from caffe2.proto import caffe2_pb2"
Error

Line: 46 Column: 1

                  workspace, device_checker, gradient_checker, test_util, core)
import contextlib
import copy
import functools
import hypothesis
import hypothesis.extra.numpy
import hypothesis.strategies as st
import logging
import numpy as np

            

Reported by Pylint.
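The three wrong-import-order reports share one fix: PEP 8 groups standard-library imports first, then third-party packages, then first-party code such as caffe2. A sketch of the reordered block, limited to the imports that appear in the report:

    # Standard library
    import contextlib
    import copy
    import functools
    import logging
    import os
    import struct

    # Third-party
    import hypothesis
    import hypothesis.extra.numpy
    import hypothesis.strategies as st
    import numpy as np

    # First-party
    from caffe2.proto import caffe2_pb2
    from caffe2.python import (
        workspace, device_checker, gradient_checker, test_util, core)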

torch/package/package_exporter.py
95 issues
Attempted relative import beyond top-level package
Error

Line: 27 Column: 1

              from torch.serialization import location_tag, normalize_storage_type
from torch.utils.hooks import RemovableHandle

from ._digraph import DiGraph
from ._importlib import _normalize_path
from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 28 Column: 1

              from torch.utils.hooks import RemovableHandle

from ._digraph import DiGraph
from ._importlib import _normalize_path
from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 29 Column: 1

              
from ._digraph import DiGraph
from ._importlib import _normalize_path
from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 30 Column: 1

              from ._digraph import DiGraph
from ._importlib import _normalize_path
from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 31 Column: 1

              from ._importlib import _normalize_path
from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

_gate_torchscript_serialization = True

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 32 Column: 1

              from ._mangling import is_mangled
from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

_gate_torchscript_serialization = True


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 33 Column: 1

              from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

_gate_torchscript_serialization = True

ActionHook = Callable[["PackageExporter", str], None]

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 34 Column: 1

              from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

_gate_torchscript_serialization = True

ActionHook = Callable[["PackageExporter", str], None]


            

Reported by Pylint.

Unused GlobPattern imported from glob_group
Error

Line: 33 Column: 1

              from ._package_pickler import create_pickler
from ._stdlib import is_stdlib_module
from .find_file_dependencies import find_files_source_depends_on
from .glob_group import GlobGroup, GlobPattern
from .importer import Importer, OrderedImporter, sys_importer

_gate_torchscript_serialization = True

ActionHook = Callable[["PackageExporter", str], None]

            

Reported by Pylint.

Unnecessary pass statement
Error

Line: 104 Column: 5

                  ``allow_empty=False``, and is not matched with any module during packaging.
    """

    pass


class PackagingError(Exception):
    """This exception is raised when there is an issue with exporting a package.
    ``PackageExporter`` will attempt to gather up all the errors and present

            

Reported by Pylint.
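When a class body already contains a docstring, the trailing pass is redundant and deleting it is the whole fix. Illustrative shape only; the class name below is made up, not the one used in package_exporter.py:

    class EmptyMatchSketchError(Exception):
        """Raised when a pattern marked allow_empty=False matches no module."""
        # No ``pass`` needed: the docstring is a complete class body.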

caffe2/python/operator_test/specialized_segment_ops_test.py
95 issues
Unable to import 'hypothesis.strategies'
Error

Line: 9 Column: 1

              from caffe2.python import core
import caffe2.python.hip_test_util as hiputl
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from hypothesis import given, assume, settings


class TestSpecializedSegmentOps(hu.HypothesisTestCase):

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 11 Column: 1

              import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from hypothesis import given, assume, settings


class TestSpecializedSegmentOps(hu.HypothesisTestCase):
    @given(
        batchsize=st.integers(1, 20),

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 175 Column: 78

                      **hu.gcs_cpu_only
    )
    def test_sparse_lengths_weightedsum_8BitsRowwiseOp_cpu(
        self, batchsize, blocksize, normalize_by_lengths, empty_indices, gc, dc
    ):
        if normalize_by_lengths:
            print(
                "<test_sparse_lengths_weightedsum_SparseLengthsWeightedMean8BitsRowwise_cpu>"
            )

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 242 Column: 78

                      **hu.gcs_cpu_only
    )
    def test_sparse_lengths_sum_8BitsRowwiseOp_cpu(
        self, batchsize, blocksize, normalize_by_lengths, empty_indices, gc, dc
    ):
        if normalize_by_lengths:
            print("<test_sparse_lengths_sum_SparseLengthsMean8BitsRowwise_cpu>")
        else:
            print("<test_sparse_lengths_sum_SparseLengthsSum8BitsRowwise_cpu>")

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 301 Column: 63

                  )
    @settings(deadline=10000)
    def test_sparse_lengths_sum_8BitsRowwiseOp_cpu_invalid_index(
        self, batchsize, blocksize, normalize_by_lengths, gc, dc
    ):

        tblsize = 300
        Tbl = np.random.randint(7, size=(tblsize, blocksize), dtype=np.uint8)


            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 301 Column: 59

                  )
    @settings(deadline=10000)
    def test_sparse_lengths_sum_8BitsRowwiseOp_cpu_invalid_index(
        self, batchsize, blocksize, normalize_by_lengths, gc, dc
    ):

        tblsize = 300
        Tbl = np.random.randint(7, size=(tblsize, blocksize), dtype=np.uint8)


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              

import unittest

from caffe2.proto import caffe2_pb2
from caffe2.python import core
import caffe2.python.hip_test_util as hiputl
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st

            

Reported by Pylint.

Missing class docstring
Error

Line: 14 Column: 1

              from hypothesis import given, assume, settings


class TestSpecializedSegmentOps(hu.HypothesisTestCase):
    @given(
        batchsize=st.integers(1, 20),
        fptype=st.sampled_from([np.float16, np.float32]),
        fp16asint=st.booleans(),
        blocksize=st.sampled_from([8, 16, 32, 64, 85, 96, 128, 163]),

            

Reported by Pylint.

Argument name "gc" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                      normalize_by_lengths=st.booleans(),
        empty_indices=st.booleans(),
        **hu.gcs
    )
    def test_sparse_lengths_sum_cpu(
        self,
        batchsize,
        fptype,
        fp16asint,

            

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                      normalize_by_lengths=st.booleans(),
        empty_indices=st.booleans(),
        **hu.gcs
    )
    def test_sparse_lengths_sum_cpu(
        self,
        batchsize,
        fptype,
        fp16asint,

            

Reported by Pylint.
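The gc/dc reports here (both the naming ones and the unused-argument ones above) stem from the device-context fixtures that hu.gcs injects into every Hypothesis test, so renaming them would fight the convention used throughout these tests. A scoped pragma, or listing the names in Pylint's good-names option, is the usual response; a standalone sketch:

    class ExampleSegmentTest:
        # ``gc`` and ``dc`` follow the hu.gcs fixture convention; the pragma
        # scopes the waiver for the naming and unused-argument checks.
        def test_op_cpu(self, gc, dc):  # pylint: disable=invalid-name,unused-argument
            return True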

caffe2/python/memonger_test.py
95 issues
Unable to import 'hypothesis.strategies'
Error

Line: 7 Column: 1

              from caffe2.proto import caffe2_pb2
import caffe2.python.hypothesis_test_util as hu
from future.utils import viewvalues
import hypothesis.strategies as st
from hypothesis import given, settings
import unittest


def has_blob(proto, needle):

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 8 Column: 1

              import caffe2.python.hypothesis_test_util as hu
from future.utils import viewvalues
import hypothesis.strategies as st
from hypothesis import given, settings
import unittest


def has_blob(proto, needle):
    for op in proto.op:

            

Reported by Pylint.

Too many arguments for format string
Error

Line: 131 Column: 40

                      fc = []
        for i in range(2):
            z = brew.fc(
                m, "data{}".format(i), "fc".format(i), dim_in=2, dim_out=2)
            fc.append(z)
        r = []
        # Trick is here to have same input appear twice in a same Sum
        for x in fc:
            for y in fc:

            

Reported by Pylint.
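The too-many-format-args report points at "fc".format(i): the string has no placeholder, so the argument is dropped silently and every layer gets the same name. If a per-layer name was intended, the placeholder has to be explicit; a minimal sketch of that assumption:

    for i in range(2):
        data_name = "data{}".format(i)
        layer_name = "fc{}".format(i)  # "fc".format(i) would always yield "fc"
        print(data_name, layer_name)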

Using deprecated method assertEquals()
Error

Line: 267 Column: 9

                      device_crossers = device_blobs[caffe2_pb2.CPU].intersection(
            device_blobs[workspace.GpuDeviceType]
        )
        self.assertEquals(device_crossers, set())

    @given(input_dim=st.integers(min_value=4, max_value=4),
           output_dim=st.integers(min_value=4, max_value=4),
           batch_size=st.integers(min_value=4, max_value=4))
    @settings(deadline=1000)

            

Reported by Pylint.
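assertEquals is a deprecated alias; assertEqual is the supported spelling and takes the same arguments, so this is a mechanical rename:

    import unittest


    class AliasExample(unittest.TestCase):
        def test_sets_match(self):
            # assertEqual replaces the deprecated assertEquals alias one-for-one.
            self.assertEqual(set(), set())


    if __name__ == "__main__":
        unittest.main()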

Unused variable 'batch_size'
Error

Line: 460 Column: 9

                  def test_forward_optim_tree_dag_traversal(self):
        input_dim = 4
        output_dim = 4
        batch_size = 4

        m = model_helper.ModelHelper()
        m.Proto().type = "dag"
        m.Proto().num_workers = 4


            

Reported by Pylint.

Unused variable 'labels'
Error

Line: 541 Column: 22

                      from caffe2.python import rnn_cell
        T = 5
        model = model_helper.ModelHelper()
        seq_lengths, labels = \
            model.net.AddExternalInputs(
                'seq_lengths', 'labels',
            )
        init_blobs = []
        for i in range(2):

            

Reported by Pylint.

Unused variable 'last_hidden'
Error

Line: 553 Column: 17

                          )
            init_blobs.extend([hidden_init, cell_init])
        model.param_init_net.ConstantFill([], ["input"], shape=[T, 4, 10])
        output, last_hidden, _, last_state = rnn_cell.LSTM(
            model=model,
            input_blob="input",
            seq_lengths=seq_lengths,
            initial_states=init_blobs,
            dim_in=10,

            

Reported by Pylint.

Unused variable 'last_state'
Error

Line: 553 Column: 33

                          )
            init_blobs.extend([hidden_init, cell_init])
        model.param_init_net.ConstantFill([], ["input"], shape=[T, 4, 10])
        output, last_hidden, _, last_state = rnn_cell.LSTM(
            model=model,
            input_blob="input",
            seq_lengths=seq_lengths,
            initial_states=init_blobs,
            dim_in=10,

            

Reported by Pylint.

Unused variable 'softmax'
Error

Line: 565 Column: 9

                          drop_states=True,
            return_last_layer_only=True,
        )
        softmax, loss = model.net.SoftmaxWithLoss(
            [model.Flatten(output), "labels"],
            ['softmax', 'loss'],
        )

        model.AddGradientOperators([loss])

            

Reported by Pylint.

Unused argument 'batch_size'
Error

Line: 711 Column: 65

                  @given(input_dim=st.integers(min_value=4, max_value=4),
           output_dim=st.integers(min_value=4, max_value=4),
           batch_size=st.integers(min_value=4, max_value=4))
    def test_verify_graph_equality(self, input_dim, output_dim, batch_size):
        m = model_helper.ModelHelper()
        m.Proto().type = "dag"
        m.Proto().num_workers = 4
        with core.NameScope("name_x"):
            fc1 = brew.fc(m, "data", "x", dim_in=input_dim, dim_out=output_dim)

            

Reported by Pylint.

tools/autograd/load_derivatives.py
94 issues
function already defined line 196
Error

Line: 204 Column: 17

                          for arg in args_with_derivatives:
                arg_name = arg.name

                def repl(m: Any) -> str:
                    return f"{m.group(1)}{arg_name}_p{m.group(2)}"
                fw_formula = re.sub(IDENT_REGEX.format(arg_name), repl, fw_formula)

            # Do the final conjugate 3)
            fw_formula = f"({fw_formula}).conj()"

            

Reported by Pylint.

Using the global statement
Error

Line: 26 Column: 5

              
def load_derivatives(derivatives_yaml_path: str, native_yaml_path: str) -> Sequence[DifferentiabilityInfo]:
    # Do some caching as this is a deterministic function
    global _GLOBAL_LOAD_DERIVATIVE_CACHE
    key = (derivatives_yaml_path, native_yaml_path)
    if key not in _GLOBAL_LOAD_DERIVATIVE_CACHE:

        with open(derivatives_yaml_path, 'r') as f:
            definitions = yaml.load(f, Loader=YamlLoader)

            

Reported by Pylint.
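The global statement here implements a hand-rolled memoization cache keyed on the two YAML paths. If removing the global is ever desired, functools.lru_cache gives the same behaviour without module-level mutation; this is only a sketch of that alternative, with a stand-in body:

    import functools


    @functools.lru_cache(maxsize=None)
    def load_derivatives_sketch(derivatives_yaml_path: str, native_yaml_path: str):
        # Stand-in for the real parsing work; lru_cache memoizes per path pair,
        # which is what the global cache in load_derivatives.py does by hand.
        return {"derivatives": derivatives_yaml_path, "native": native_yaml_path}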

Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load().
Security: Cryptography

Line: 31
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b506_yaml_load.html

                  if key not in _GLOBAL_LOAD_DERIVATIVE_CACHE:

        with open(derivatives_yaml_path, 'r') as f:
            definitions = yaml.load(f, Loader=YamlLoader)

        functions = parse_native_yaml(native_yaml_path).native_functions

        # What's the difference between function schema v.s. signature?
        # function schema is the complete declaration including mutability annotation / default value and etc.

            

Reported by Bandit.
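Bandit flags yaml.load because, depending on the loader passed in, it can construct arbitrary Python objects. If YamlLoader is already a safe loader, this is a false positive Bandit cannot see through; otherwise yaml.safe_load (or SafeLoader / CSafeLoader passed explicitly) is the drop-in replacement. A hedged sketch with an illustrative path:

    import yaml

    # safe_load builds only plain Python objects (dicts, lists, scalars).
    # "derivatives.yaml" stands in for the real file location.
    with open("derivatives.yaml", "r") as f:
        definitions = yaml.safe_load(f)
    print(type(definitions))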

Cell variable input_name defined in loop
Error

Line: 197 Column: 39

              
            # Do replacement 1) of the grad
            def repl(m: Any) -> str:
                return f"{m.group(1)}{input_name}_t.conj(){m.group(2)}"
            fw_formula = re.sub(IDENT_REGEX.format("grad"), repl, backward_formula)

            # Do replacement 2) of the input variables
            for arg in args_with_derivatives:
                arg_name = arg.name

            

Reported by Pylint.

Cell variable arg_name defined in loop
Error

Line: 205 Column: 43

                              arg_name = arg.name

                def repl(m: Any) -> str:
                    return f"{m.group(1)}{arg_name}_p{m.group(2)}"
                fw_formula = re.sub(IDENT_REGEX.format(arg_name), repl, fw_formula)

            # Do the final conjugate 3)
            fw_formula = f"({fw_formula}).conj()"


            

Reported by Pylint.
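Both cell-variable reports describe the same closure pitfall: repl captures the loop variable by reference, so a closure that outlived the iteration would see only the final value. Because re.sub calls repl immediately the current code behaves correctly, but binding the value as a default argument makes that explicit and silences W0640; a small sketch:

    import re

    formula = "grad * x + grad * y"
    for arg_name in ("x", "y"):
        # The default argument freezes arg_name at definition time, so the
        # closure no longer depends on the loop variable's final value.
        def repl(m, arg_name=arg_name):
            return f"{m.group(0)}_{arg_name}"
        formula = re.sub(r"\bgrad\b", repl, formula, count=1)
    print(formula)  # grad_x * x + grad_y * y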

TODO: do we need eagerly calculate and save it here? Can it be derived
Error

Line: 366 Column: 3

                                             f'and differentiable variables: {overlap}')

        # Next, let us determine the list of inputs in order.
        # TODO: do we need eagerly calculate and save it here? Can it be derived
        # from NativeFunction and `derivatives` on callsites instead?
        args_with_derivatives = [a for a in cpp_arguments(f) if a.name in args_with_derivatives_set]

        # Postprocess forward derivatives definitions now that we know the differentiable arguments
        forward_derivatives = postprocess_forward_derivatives(f, defn_name, all_arg_names, derivatives,

            

Reported by Pylint.

TODO: maybe the logic to handle the legacy schema is no longer necessary?
Error

Line: 405 Column: 3

              
    # now map this to the legacy schema; this isn't technically necessary, but we'd need some logic here
    # to map in-place schemas to the out-of-place variants.
    # TODO: maybe the logic to handle the legacy schema is no longer necessary?
    signature = schema_function.func.signature()
    functions = functions_by_signature[signature]
    if len(functions) == 0:
        avail = '\n'.join(str(k) for k, v in functions_by_signature.items() if cpp.name(k) == defn_name)
        raise RuntimeError(f'could not find ATen function for legacy signature: {signature} '

            

Reported by Pylint.

Second argument of isinstance is not a type
Error

Line: 533 Column: 36

                  saved: List[SavedAttribute] = []

    for nctype in nctypes:
        name = nctype.name.name if isinstance(nctype.name, SpecialArgName) else nctype.name
        # First search the formula for expressions which can be evaluated
        # when the autograd Function is created to avoid saving variables
        for regex, info in REPLACEMENTS:
            def repl(m: Match[str]) -> str:
                suffix: str = info['suffix'](m) if callable(info['suffix']) else info['suffix']

            

Reported by Pylint.

Cell variable info defined in loop
Error

Line: 538 Column: 61

                      # when the autograd Function is created to avoid saving variables
        for regex, info in REPLACEMENTS:
            def repl(m: Match[str]) -> str:
                suffix: str = info['suffix'](m) if callable(info['suffix']) else info['suffix']
                expr: str = info['expr'](name) if 'expr' in info else m.group(0)
                saved.append(SavedAttribute(
                    nctype=info['nctype'](name + suffix),
                    expr=expr,
                ))

            

Reported by Pylint.

Cell variable info defined in loop
Error

Line: 538 Column: 31

                      # when the autograd Function is created to avoid saving variables
        for regex, info in REPLACEMENTS:
            def repl(m: Match[str]) -> str:
                suffix: str = info['suffix'](m) if callable(info['suffix']) else info['suffix']
                expr: str = info['expr'](name) if 'expr' in info else m.group(0)
                saved.append(SavedAttribute(
                    nctype=info['nctype'](name + suffix),
                    expr=expr,
                ))

            

Reported by Pylint.

test/jit/test_symbolic_shape_analysis.py
94 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
from torch.testing._internal.jit_utils import JitTestCase
import operator

from torch.testing import FileCheck
from typing import List


if __name__ == '__main__':

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 2 Column: 1

              import torch
from torch.testing._internal.jit_utils import JitTestCase
import operator

from torch.testing import FileCheck
from typing import List


if __name__ == '__main__':

            

Reported by Pylint.

Unable to import 'torch.testing'
Error

Line: 5 Column: 1

              from torch.testing._internal.jit_utils import JitTestCase
import operator

from torch.testing import FileCheck
from typing import List


if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"

            

Reported by Pylint.

function already defined line 87
Error

Line: 95 Column: 9

                      FileCheck().check("Tensor = aten::mul").run(foo.graph)

        @torch.jit.script
        def foo(y):
            x = [1, 2, 3, 4]
            x[0] = 5
            return y.view(x)

        torch._C._jit_pass_propagate_shapes_on_graph(foo.graph)

            

Reported by Pylint.

XXX: still in prototype
Error

Line: 14 Column: 3

                                     "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

# XXX: still in prototype
class TestSymbolicShapeAnalysis(JitTestCase):
    def setUp(self):
        self.prev_symbolic_shapes_test_enabled = torch._C._jit_symbolic_shapes_test_mode_enabled()
        torch._C._jit_set_symbolic_shapes_test_mode(True)


            

Reported by Pylint.

Access to a protected member _jit_symbolic_shapes_test_mode_enabled of a client class
Error

Line: 17 Column: 50

              # XXX: still in prototype
class TestSymbolicShapeAnalysis(JitTestCase):
    def setUp(self):
        self.prev_symbolic_shapes_test_enabled = torch._C._jit_symbolic_shapes_test_mode_enabled()
        torch._C._jit_set_symbolic_shapes_test_mode(True)

    def tearDown(self):
        torch._C._jit_set_symbolic_shapes_test_mode(self.prev_symbolic_shapes_test_enabled)


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 17 Column: 50

              # XXX: still in prototype
class TestSymbolicShapeAnalysis(JitTestCase):
    def setUp(self):
        self.prev_symbolic_shapes_test_enabled = torch._C._jit_symbolic_shapes_test_mode_enabled()
        torch._C._jit_set_symbolic_shapes_test_mode(True)

    def tearDown(self):
        torch._C._jit_set_symbolic_shapes_test_mode(self.prev_symbolic_shapes_test_enabled)


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 18 Column: 9

              class TestSymbolicShapeAnalysis(JitTestCase):
    def setUp(self):
        self.prev_symbolic_shapes_test_enabled = torch._C._jit_symbolic_shapes_test_mode_enabled()
        torch._C._jit_set_symbolic_shapes_test_mode(True)

    def tearDown(self):
        torch._C._jit_set_symbolic_shapes_test_mode(self.prev_symbolic_shapes_test_enabled)

    def test_shape_analysis(self):

            

Reported by Pylint.

Access to a protected member _jit_set_symbolic_shapes_test_mode of a client class
Error

Line: 18 Column: 9

              class TestSymbolicShapeAnalysis(JitTestCase):
    def setUp(self):
        self.prev_symbolic_shapes_test_enabled = torch._C._jit_symbolic_shapes_test_mode_enabled()
        torch._C._jit_set_symbolic_shapes_test_mode(True)

    def tearDown(self):
        torch._C._jit_set_symbolic_shapes_test_mode(self.prev_symbolic_shapes_test_enabled)

    def test_shape_analysis(self):

            

Reported by Pylint.

Access to a protected member _jit_set_symbolic_shapes_test_mode of a client class
Error

Line: 21 Column: 9

                      torch._C._jit_set_symbolic_shapes_test_mode(True)

    def tearDown(self):
        torch._C._jit_set_symbolic_shapes_test_mode(self.prev_symbolic_shapes_test_enabled)

    def test_shape_analysis(self):
        @torch.jit.script
        def foo(x, y):
            return x * y

            

Reported by Pylint.

benchmarks/operator_benchmark/pt/conv_test.py
93 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

              
import operator_benchmark as op_bench
import torch
import torch.nn as nn

from pt import configs

"""
Microbenchmarks for Conv1d and ConvTranspose1d operators.

            

Reported by Pylint.

Unable to import 'torch.nn'
Error

Line: 4 Column: 1

              
import operator_benchmark as op_bench
import torch
import torch.nn as nn

from pt import configs

"""
Microbenchmarks for Conv1d and ConvTranspose1d operators.

            

Reported by Pylint.

Unable to import 'pt'
Error

Line: 6 Column: 1

              import torch
import torch.nn as nn

from pt import configs

"""
Microbenchmarks for Conv1d and ConvTranspose1d operators.
"""


            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 12 Column: 23

              Microbenchmarks for Conv1d and ConvTranspose1d operators.
"""

class Conv1dBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, IC, OC, kernel, stride, N, L, device):
        self.inputs = {
            "input": torch.rand(N, IC, L, device=device, requires_grad=self.auto_set())
        }
        self.conv1d = nn.Conv1d(IC, OC, kernel, stride=stride).to(device=device)

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 24 Column: 32

                      return self.conv1d(input)


class ConvTranspose1dBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, IC, OC, kernel, stride, N, L, device):
        self.inputs = {
            "input": torch.rand(N, IC, L, device=device)
        }
        self.convtranspose1d = nn.ConvTranspose1d(IC, OC, kernel, stride=stride).to(device=device)

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_test' member
Error

Line: 36 Column: 1

                      return self.convtranspose1d(input)


op_bench.generate_pt_test(configs.conv_1d_configs_short + configs.conv_1d_configs_long,
                          Conv1dBenchmark)
op_bench.generate_pt_test(configs.conv_1d_configs_short + configs.conv_1d_configs_long,
                          ConvTranspose1dBenchmark)



            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_test' member
Error

Line: 38 Column: 1

              
op_bench.generate_pt_test(configs.conv_1d_configs_short + configs.conv_1d_configs_long,
                          Conv1dBenchmark)
op_bench.generate_pt_test(configs.conv_1d_configs_short + configs.conv_1d_configs_long,
                          ConvTranspose1dBenchmark)


"""
Microbenchmarks for Conv2d and ConvTranspose2d operators.

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 47 Column: 23

              """


class Conv2dBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, IC, OC, kernel, stride, N, H, W, G, pad, device):
        self.inputs = {
            "input": torch.rand(N, IC, H, W, device=device)
        }
        self.conv2d = nn.Conv2d(

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 60 Column: 32

                      return self.conv2d(input)


class ConvTranspose2dBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, IC, OC, kernel, stride, N, H, W, G, pad, device):
        self.inputs = {
            "input": torch.rand(N, IC, H, W, device=device)
        }
        self.convtranspose2d = nn.ConvTranspose2d(

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_test' member
Error

Line: 73 Column: 1

                      return self.convtranspose2d(input)


op_bench.generate_pt_test(configs.conv_2d_configs_short + configs.conv_2d_configs_long,
                          Conv2dBenchmark)
op_bench.generate_pt_test(configs.conv_2d_configs_short + configs.conv_2d_configs_long,
                          ConvTranspose2dBenchmark)



            

Reported by Pylint.