The following issues were found:

torch/utils/data/sampler.py
30 issues
Module 'torch' has no 'Generator' member
Error

Line: 115 Column: 30

                  def __iter__(self) -> Iterator[int]:
        n = len(self.data_source)
        if self.generator is None:
            self.generator = torch.Generator()
            self.generator.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))

        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()

            

Reported by Pylint.

Module 'torch' has no 'empty' member
Error

Line: 116 Column: 44

                      n = len(self.data_source)
        if self.generator is None:
            self.generator = torch.Generator()
            self.generator.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))

        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()

            

Reported by Pylint.

Module 'torch' has no 'int64' member
Error

Line: 116 Column: 66

                      n = len(self.data_source)
        if self.generator is None:
            self.generator = torch.Generator()
            self.generator.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))

        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()

            

Reported by Pylint.

Module 'torch' has no 'randint' member
Error

Line: 120 Column: 28

              
        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
        else:
            yield from torch.randperm(n, generator=self.generator).tolist()

    def __len__(self) -> int:

            

Reported by Pylint.

Module 'torch' has no 'int64' member
Error

Line: 120 Column: 68

              
        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
        else:
            yield from torch.randperm(n, generator=self.generator).tolist()

    def __len__(self) -> int:

            

Reported by Pylint.

Module 'torch' has no 'randint' member
Error

Line: 121 Column: 24

                      if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
        else:
            yield from torch.randperm(n, generator=self.generator).tolist()

    def __len__(self) -> int:
        return self.num_samples

            

Reported by Pylint.

Module 'torch' has no 'int64' member
Error

Line: 121 Column: 83

                      if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
        else:
            yield from torch.randperm(n, generator=self.generator).tolist()

    def __len__(self) -> int:
        return self.num_samples

            

Reported by Pylint.

Module 'torch' has no 'randperm' member
Error

Line: 123 Column: 24

                              yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
        else:
            yield from torch.randperm(n, generator=self.generator).tolist()

    def __len__(self) -> int:
        return self.num_samples



            

Reported by Pylint.

Module 'torch' has no 'randperm' member
Error

Line: 143 Column: 42

                      self.generator = generator

    def __iter__(self) -> Iterator[int]:
        return (self.indices[i] for i in torch.randperm(len(self.indices), generator=self.generator))

    def __len__(self) -> int:
        return len(self.indices)



            

Reported by Pylint.

Module 'torch' has no 'double' member
Error

Line: 179 Column: 55

                      if not isinstance(replacement, bool):
            raise ValueError("replacement should be a boolean value, but got "
                             "replacement={}".format(replacement))
        self.weights = torch.as_tensor(weights, dtype=torch.double)
        self.num_samples = num_samples
        self.replacement = replacement
        self.generator = generator

    def __iter__(self) -> Iterator[int]:

            

Reported by Pylint.

torch/distributed/rpc/__init__.py
30 issues
Unable to import 'torch._C._distributed_c10d'
Error

Line: 26 Column: 5

              

if is_available():
    from torch._C._distributed_c10d import Store
    from torch._C._distributed_rpc import (
        _disable_jit_rref_pickle,
        _enable_jit_rref_pickle,
        _disable_server_process_global_profiler,
        _enable_server_process_global_profiler,

            

Reported by Pylint.

Unable to import 'torch._C._distributed_rpc'
Error

Line: 27 Column: 5

              
if is_available():
    from torch._C._distributed_c10d import Store
    from torch._C._distributed_rpc import (
        _disable_jit_rref_pickle,
        _enable_jit_rref_pickle,
        _disable_server_process_global_profiler,
        _enable_server_process_global_profiler,
        _set_and_start_rpc_agent,

            

Reported by Pylint.

Unable to import '__init__.api'
Error

Line: 64 Column: 5

                  )  # noqa: F401

    from . import api, backend_registry, functions
    from .api import *  # noqa: F401,F403
    import numbers

    import torch.distributed.autograd as dist_autograd

    from .backend_registry import BackendType

            

Reported by Pylint.

Unable to import '__init__.backend_registry'
Error

Line: 69 Column: 5

              
    import torch.distributed.autograd as dist_autograd

    from .backend_registry import BackendType
    from .options import TensorPipeRpcBackendOptions  # noqa: F401
    from .server_process_global_profiler import (
        _server_process_global_profile,
    )


            

Reported by Pylint.

Unable to import '__init__.options'
Error

Line: 70 Column: 5

                  import torch.distributed.autograd as dist_autograd

    from .backend_registry import BackendType
    from .options import TensorPipeRpcBackendOptions  # noqa: F401
    from .server_process_global_profiler import (
        _server_process_global_profile,
    )

    rendezvous_iterator: Generator[Tuple[Store, int, int], None, None]

            

Reported by Pylint.

Unable to import '__init__.server_process_global_profiler'
Error

Line: 71 Column: 5

              
    from .backend_registry import BackendType
    from .options import TensorPipeRpcBackendOptions  # noqa: F401
    from .server_process_global_profiler import (
        _server_process_global_profile,
    )

    rendezvous_iterator: Generator[Tuple[Store, int, int], None, None]


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 18 Column: 20

              

def is_available():
    return hasattr(torch._C, "_rpc_init")


if is_available() and not torch._C._rpc_init():
    raise RuntimeError("Failed to initialize torch.distributed.rpc")


            

Reported by Pylint.

Access to a protected member _rpc_init of a client class
Error

Line: 21 Column: 27

                  return hasattr(torch._C, "_rpc_init")


if is_available() and not torch._C._rpc_init():
    raise RuntimeError("Failed to initialize torch.distributed.rpc")


if is_available():
    from torch._C._distributed_c10d import Store

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 21 Column: 27

                  return hasattr(torch._C, "_rpc_init")


if is_available() and not torch._C._rpc_init():
    raise RuntimeError("Failed to initialize torch.distributed.rpc")


if is_available():
    from torch._C._distributed_c10d import Store

            

Reported by Pylint.

Module import itself
Error

Line: 63 Column: 5

                      _DEFAULT_RPC_TIMEOUT_SEC,
    )  # noqa: F401

    from . import api, backend_registry, functions
    from .api import *  # noqa: F401,F403
    import numbers

    import torch.distributed.autograd as dist_autograd


            

Reported by Pylint.

test/test_namedtuple_return_api.py
30 issues
Unable to import 'torch'
Error

Line: 5 Column: 1

              import re
import yaml
import textwrap
import torch

from torch.testing._internal.common_utils import TestCase, run_tests
from collections import namedtuple



            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 7 Column: 1

              import textwrap
import torch

from torch.testing._internal.common_utils import TestCase, run_tests
from collections import namedtuple


path = os.path.dirname(os.path.realpath(__file__))
aten_native_yaml = os.path.join(path, '../aten/src/ATen/native/native_functions.yaml')

            

Reported by Pylint.

Access to a protected member _VF of a client class
Error

Line: 114 Column: 23

                              mod = torch.linalg
                f = f[7:]
            if f.startswith('_'):
                mod = torch._VF
            return getattr(mod, f, None)

        def check_namedtuple(tup, names):
            "Check that the namedtuple 'tup' has the given names"
            for i, name in enumerate(names):

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import os
import re
import yaml
import textwrap
import torch

from torch.testing._internal.common_utils import TestCase, run_tests
from collections import namedtuple


            

Reported by Pylint.

standard import "import textwrap" should be placed before "import yaml"
Error

Line: 4 Column: 1

              import os
import re
import yaml
import textwrap
import torch

from torch.testing._internal.common_utils import TestCase, run_tests
from collections import namedtuple


            

Reported by Pylint.

standard import "from collections import namedtuple" should be placed before "import yaml"
Error

Line: 8 Column: 1

              import torch

from torch.testing._internal.common_utils import TestCase, run_tests
from collections import namedtuple


path = os.path.dirname(os.path.realpath(__file__))
aten_native_yaml = os.path.join(path, '../aten/src/ATen/native/native_functions.yaml')
all_operators_with_namedtuple_return = {

            

Reported by Pylint.

Line too long (101/100)
Error

Line: 18 Column: 1

                  'qr', 'geqrf', 'solve', 'slogdet', 'sort', 'topk', 'lstsq', 'linalg_inv_ex',
    'triangular_solve', 'cummax', 'cummin', 'linalg_eigh', "_unpack_dual", 'linalg_qr',
    '_svd_helper', 'linalg_svd', 'linalg_slogdet', 'fake_quantize_per_tensor_affine_cachemask',
    'fake_quantize_per_channel_affine_cachemask', 'linalg_lstsq', 'linalg_eig', 'linalg_cholesky_ex',
    'frexp', 'lu_unpack', 'histogram', '_fake_quantize_per_tensor_affine_cachemask_tensor_qparams',
    '_fused_moving_avg_obs_fq_helper',
    '_det_lu_based_helper',
    '_lu_with_info',
}

            

Reported by Pylint.

Missing class docstring
Error

Line: 26 Column: 1

              }


class TestNamedTupleAPI(TestCase):

    def test_native_functions_yaml(self):
        operators_found = set()
        regex = re.compile(r"^(\w*)(\(|\.)")
        file = open(aten_native_yaml, 'r')

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 28 Column: 5

              
class TestNamedTupleAPI(TestCase):

    def test_native_functions_yaml(self):
        operators_found = set()
        regex = re.compile(r"^(\w*)(\(|\.)")
        file = open(aten_native_yaml, 'r')
        for f in yaml.safe_load(file.read()):
            f = f['func']

            

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 32 Column: 13

                      operators_found = set()
        regex = re.compile(r"^(\w*)(\(|\.)")
        file = open(aten_native_yaml, 'r')
        for f in yaml.safe_load(file.read()):
            f = f['func']
            ret = f.split('->')[1].strip()
            name = regex.findall(f)[0][0]
            if name in all_operators_with_namedtuple_return:
                operators_found.add(name)

            

Reported by Pylint.

test/fx/test_fx_param_shape_control_flow.py
30 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              import unittest
import torch
import torch.fx


class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():

            

Reported by Pylint.

Unable to import 'torch.fx'
Error

Line: 3 Column: 1

              import unittest
import torch
import torch.fx


class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import unittest
import torch
import torch.fx


class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():

            

Reported by Pylint.

Missing class docstring
Error

Line: 6 Column: 1

              import torch.fx


class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():
            return torch.mm(x, matrx)
        else:

            

Reported by Pylint.

Argument name "x" doesn't conform to snake_case naming style
Error

Line: 7 Column: 5

              

class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():
            return torch.mm(x, matrx)
        else:
            return torch.relu(torch.mm(x, matrx))

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 7 Column: 5

              

class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():
            return torch.mm(x, matrx)
        else:
            return torch.relu(torch.mm(x, matrx))

            

Reported by Pylint.

Unnecessary "else" after "return"
Error

Line: 9 Column: 9

              class MyModuleBase(torch.nn.Module):
    def forward(self, x):
        matrx = self.get_mul_matrix()
        if self.no_relu():
            return torch.mm(x, matrx)
        else:
            return torch.relu(torch.mm(x, matrx))

    def get_mul_matrix(self):

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 14 Column: 5

                      else:
            return torch.relu(torch.mm(x, matrx))

    def get_mul_matrix(self):
        return self.param

    def no_relu(self):
        raise Exception("not implemented")


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 17 Column: 5

                  def get_mul_matrix(self):
        return self.param

    def no_relu(self):
        raise Exception("not implemented")

class MyModuleParamShape(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()

            

Reported by Pylint.

Method could be a function
Error

Line: 17 Column: 5

                  def get_mul_matrix(self):
        return self.param

    def no_relu(self):
        raise Exception("not implemented")

class MyModuleParamShape(MyModuleBase):
    def __init__(self, in_channels):
        super().__init__()

            

Reported by Pylint.

benchmarks/fastrnns/scratch.py
30 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch


@torch.jit.script
def fn(x, scale, shift):
    return scale * x / shift


@torch.jit.script

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 27 Column: 1

              recurrent.graph_for(x, scale, shift)


import torch


@torch.jit.script
def recurrent_scaleshift(x, scale, shift):
    y = x

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 46 Column: 1

              recurrent_scaleshift.graph_for(x, scale, shift)


import torch
x = torch.tensor([])
x.requires_grad = True
x.mean().backward()  # no error triggered
x = x.cuda()
x.mean().backward()

            

Reported by Pylint.

Redefining name 'x' from outer scope (line 17)
Error

Line: 5 Column: 8

              

@torch.jit.script
def fn(x, scale, shift):
    return scale * x / shift


@torch.jit.script
def recurrent(x, scale, shift):

            

Reported by Pylint.

Redefining name 'shift' from outer scope (line 19)
Error

Line: 5 Column: 18

              

@torch.jit.script
def fn(x, scale, shift):
    return scale * x / shift


@torch.jit.script
def recurrent(x, scale, shift):

            

Reported by Pylint.

Redefining name 'scale' from outer scope (line 18)
Error

Line: 5 Column: 11

              

@torch.jit.script
def fn(x, scale, shift):
    return scale * x / shift


@torch.jit.script
def recurrent(x, scale, shift):

            

Reported by Pylint.

Redefining name 'scale' from outer scope (line 18)
Error

Line: 10 Column: 18

              

@torch.jit.script
def recurrent(x, scale, shift):
    y = x
    for i in range(100):
        y = fn(y, scale, shift)
    return y


            

Reported by Pylint.

Redefining name 'shift' from outer scope (line 19)
Error

Line: 10 Column: 25

              

@torch.jit.script
def recurrent(x, scale, shift):
    y = x
    for i in range(100):
        y = fn(y, scale, shift)
    return y


            

Reported by Pylint.

Redefining name 'x' from outer scope (line 17)
Error

Line: 10 Column: 15

              

@torch.jit.script
def recurrent(x, scale, shift):
    y = x
    for i in range(100):
        y = fn(y, scale, shift)
    return y


            

Reported by Pylint.

Unused variable 'i'
Error

Line: 12 Column: 9

              @torch.jit.script
def recurrent(x, scale, shift):
    y = x
    for i in range(100):
        y = fn(y, scale, shift)
    return y


x = torch.randn(2, 2, device='cuda')

            

Reported by Pylint.

torch/utils/benchmark/utils/sparse_fuzzer.py
30 issues
Module 'torch' has no 'float32' member
Error

Line: 19 Column: 15

                      nnz: Optional[str] = None,
        density: Optional[str] = None,
        coalesced: Optional[str] = None,
        dtype=torch.float32,
        cuda=False
    ):
        """
        Args:
            name:

            

Reported by Pylint.

Module 'torch' has no 'rand' member
Error

Line: 75 Column: 17

                      assert all(size[d] > 0 for d in range(sparse_dim)) or nnz == 0, 'invalid arguments'
        v_size = [nnz] + list(size[sparse_dim:])
        if dtype.is_floating_point:
            v = torch.rand(size=v_size, dtype=dtype, device="cpu")
        else:
            v = torch.randint(1, 127, size=v_size, dtype=dtype, device="cpu")

        i = torch.rand(sparse_dim, nnz, device="cpu")
        i.mul_(torch.tensor(size[:sparse_dim]).unsqueeze(1).to(i))

            

Reported by Pylint.

Module 'torch' has no 'randint' member
Error

Line: 77 Column: 17

                      if dtype.is_floating_point:
            v = torch.rand(size=v_size, dtype=dtype, device="cpu")
        else:
            v = torch.randint(1, 127, size=v_size, dtype=dtype, device="cpu")

        i = torch.rand(sparse_dim, nnz, device="cpu")
        i.mul_(torch.tensor(size[:sparse_dim]).unsqueeze(1).to(i))
        i = i.to(torch.long)


            

Reported by Pylint.

Module 'torch' has no 'rand' member
Error

Line: 79 Column: 13

                      else:
            v = torch.randint(1, 127, size=v_size, dtype=dtype, device="cpu")

        i = torch.rand(sparse_dim, nnz, device="cpu")
        i.mul_(torch.tensor(size[:sparse_dim]).unsqueeze(1).to(i))
        i = i.to(torch.long)

        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)

            

Reported by Pylint.

Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 80 Column: 16

                          v = torch.randint(1, 127, size=v_size, dtype=dtype, device="cpu")

        i = torch.rand(sparse_dim, nnz, device="cpu")
        i.mul_(torch.tensor(size[:sparse_dim]).unsqueeze(1).to(i))
        i = i.to(torch.long)

        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)

            

Reported by Pylint.

Module 'torch' has no 'long' member
Error

Line: 81 Column: 18

              
        i = torch.rand(sparse_dim, nnz, device="cpu")
        i.mul_(torch.tensor(size[:sparse_dim]).unsqueeze(1).to(i))
        i = i.to(torch.long)

        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)


            

Reported by Pylint.

Module 'torch' has no 'cat' member
Error

Line: 84 Column: 17

                      i = i.to(torch.long)

        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)

        x = torch.sparse_coo_tensor(i, v, torch.Size(size))
        if is_coalesced:
            x = x.coalesce()

            

Reported by Pylint.

Module 'torch' has no 'randn_like' member
Error

Line: 84 Column: 31

                      i = i.to(torch.long)

        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)

        x = torch.sparse_coo_tensor(i, v, torch.Size(size))
        if is_coalesced:
            x = x.coalesce()

            

Reported by Pylint.

Module 'torch' has no 'cat' member
Error

Line: 85 Column: 17

              
        if not is_coalesced:
            v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)

        x = torch.sparse_coo_tensor(i, v, torch.Size(size))
        if is_coalesced:
            x = x.coalesce()
        return x

            

Reported by Pylint.

Module 'torch' has no 'sparse_coo_tensor' member
Error

Line: 87 Column: 13

                          v = torch.cat([v, torch.randn_like(v)], 0)
            i = torch.cat([i, i], 1)

        x = torch.sparse_coo_tensor(i, v, torch.Size(size))
        if is_coalesced:
            x = x.coalesce()
        return x

    def _make_tensor(self, params, state):

            

Reported by Pylint.

caffe2/python/operator_test/rank_loss_operator_test.py
30 issues
Unable to import 'hypothesis'
Error

Line: 7 Column: 1

              

from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np


            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 10 Column: 1

              from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np


class TestPairWiseLossOps(serial.SerializedTestCase):
    @given(X=hu.arrays(dims=[2, 1],

            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 21 Column: 57

                                         elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_predictions(self, X, label, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('label', label)
        new_label = np.array([label[1], label[0]])
        new_x = np.array([X[1], X[0]])
        workspace.FeedBlob('new_x', new_x)

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 21 Column: 61

                                         elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_predictions(self, X, label, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('label', label)
        new_label = np.array([label[1], label[0]])
        new_x = np.array([X[1], X[0]])
        workspace.FeedBlob('new_x', new_x)

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 58 Column: 62

                         dY=hu.arrays(dims=[1],
                        elements=hu.floats(min_value=1, max_value=10)),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_gradient(self, X, label, dY, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('dY', dY)
        workspace.FeedBlob('label', label)
        net = core.Net('net')
        net.PairWiseLossGradient(

            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 58 Column: 58

                         dY=hu.arrays(dims=[1],
                        elements=hu.floats(min_value=1, max_value=10)),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_gradient(self, X, label, dY, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('dY', dY)
        workspace.FeedBlob('label', label)
        net = core.Net('net')
        net.PairWiseLossGradient(

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              




from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial

            

Reported by Pylint.

Missing class docstring
Error

Line: 14 Column: 1

              import numpy as np


class TestPairWiseLossOps(serial.SerializedTestCase):
    @given(X=hu.arrays(dims=[2, 1],
                       elements=hu.floats(min_value=0.0, max_value=10.0)),
           label=hu.arrays(dims=[2, 1],
                           elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),

            

Reported by Pylint.

Argument name "X" doesn't conform to snake_case naming style
Error

Line: 21 Column: 5

                                         elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_predictions(self, X, label, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('label', label)
        new_label = np.array([label[1], label[0]])
        new_x = np.array([X[1], X[0]])
        workspace.FeedBlob('new_x', new_x)

            

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Error

Line: 21 Column: 5

                                         elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_predictions(self, X, label, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('label', label)
        new_label = np.array([label[1], label[0]])
        new_x = np.array([X[1], X[0]])
        workspace.FeedBlob('new_x', new_x)

            

Reported by Pylint.

torch/fx/passes/split_module.py
29 issues
Unused argument 'root_m'
Error

Line: 28 Column: 5

              # Creates subgraphs out of main graph
def split_module(
    m: GraphModule,
    root_m: torch.nn.Module,
    split_callback: Callable[[torch.fx.node.Node], int],
):
    partitions: Dict[str, Partition] = {}
    orig_nodes: Dict[str, torch.fx.node.Node] = {}


            

Reported by Pylint.

TODO currently placeholders/parameters aren't put into random partitions,
Error

Line: 54 Column: 3

                  for node in m.graph.nodes:
        orig_nodes[node.name] = node

        # TODO currently placeholders/parameters aren't put into random partitions,
        # rather they're added to the graphs where they are used down below
        if node.op in ["placeholder", "get_attr"]:
            continue
        if node.op == 'output':
            torch.fx.graph.map_arg(node.args[0], lambda n: record_cross_partition_use(n, None))

            

Reported by Pylint.

Access to a protected member _fx_partition of a client class
Error

Line: 69 Column: 9

                          partitions[partition_name] = partition = Partition(partition_name)

        partition.node_names.append(node.name)
        node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies

            

Reported by Pylint.

Cell variable 'node' defined in loop
Error

Line: 71 Column: 97

                      partition.node_names.append(node.name)
        node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies
    root_partitions : List[str] = []
    for partition_name, partition in partitions.items():

            

Reported by Pylint.

Cell variable 'node' defined in loop
Error

Line: 72 Column: 99

                      node._fx_partition = partition_name

        torch.fx.graph.map_arg(node.args, lambda def_node: record_cross_partition_use(def_node, node))
        torch.fx.graph.map_arg(node.kwargs, lambda def_node: record_cross_partition_use(def_node, node))  # noqa: B950

    # find partitions with no dependencies
    root_partitions : List[str] = []
    for partition_name, partition in partitions.items():
        if not len(partition.partitions_dependent_on):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 95 Column: 13

                  # add placeholders to parititons
    for partition_name in sorted_partitions:
        partition = partitions[partition_name]
        for input in partition.inputs:
            placeholder = partition.graph.placeholder(input)
            placeholder.meta = orig_nodes[input].meta.copy()
            partition.environment[orig_nodes[input]] = placeholder

    # Transform nodes and collect targets for partition's submodule

            

Reported by Pylint.

Access to a protected member _fx_partition of a client class
Error

Line: 103 Column: 36

                  # Transform nodes and collect targets for partition's submodule
    for node in m.graph.nodes:
        if hasattr(node, '_fx_partition'):
            partition = partitions[node._fx_partition]

            # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            

Reported by Pylint.

Cell variable 'environment' defined in loop
Error

Line: 107 Column: 74

              
            # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            if node.op not in ['call_module', 'get_attr']:
                target = node.target
            else:

            

Reported by Pylint.

Cell variable 'environment' defined in loop
Error

Line: 108 Column: 78

                          # swap out old graph nodes in kw/args with references to new nodes in this submodule
            environment = partition.environment
            gathered_args = torch.fx.graph.map_arg(node.args, lambda n : environment[n])
            gathered_kwargs = torch.fx.graph.map_arg(node.kwargs, lambda n : environment[n])

            if node.op not in ['call_module', 'get_attr']:
                target = node.target
            else:
                target_atoms = node.target.split('.')

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
from torch.fx.graph_module import GraphModule
from typing import Callable, List, Dict, Any, Optional

class Partition:
    def __init__(self, name: str):
        self.name: str = name
        self.node_names: List[str] = []
        self.inputs: Dict[str, None] = {}

            

Reported by Pylint.

test/ao/sparsity/test_scheduler.py
29 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              # -*- coding: utf-8 -*-
from torch import nn
from torch.ao.sparsity import WeightNormSparsifier
from torch.ao.sparsity import BaseScheduler, LambdaSL

from torch.testing._internal.common_utils import TestCase

import warnings


            

Reported by Pylint.

Unable to import 'torch.ao.sparsity'
Error

Line: 3 Column: 1

              # -*- coding: utf-8 -*-
from torch import nn
from torch.ao.sparsity import WeightNormSparsifier
from torch.ao.sparsity import BaseScheduler, LambdaSL

from torch.testing._internal.common_utils import TestCase

import warnings


            

Reported by Pylint.

Unable to import 'torch.ao.sparsity'
Error

Line: 4 Column: 1

              # -*- coding: utf-8 -*-
from torch import nn
from torch.ao.sparsity import WeightNormSparsifier
from torch.ao.sparsity import BaseScheduler, LambdaSL

from torch.testing._internal.common_utils import TestCase

import warnings


            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 6 Column: 1

              from torch.ao.sparsity import WeightNormSparsifier
from torch.ao.sparsity import BaseScheduler, LambdaSL

from torch.testing._internal.common_utils import TestCase

import warnings

class ImplementedScheduler(BaseScheduler):
    def get_sl(self):

            

Reported by Pylint.

Access to a protected member _step_count of a client class
Error

Line: 29 Column: 16

                      scheduler = ImplementedScheduler(sparsifier)

        assert scheduler.sparsifier is sparsifier
        assert scheduler._step_count == 1
        assert scheduler.base_sl == [sparsifier.module_groups[0]['sparsity_level']]

    def test_order_of_steps(self):
        """Checks if the warning is thrown if the scheduler step is called
        before the sparsifier step"""

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # -*- coding: utf-8 -*-
from torch import nn
from torch.ao.sparsity import WeightNormSparsifier
from torch.ao.sparsity import BaseScheduler, LambdaSL

from torch.testing._internal.common_utils import TestCase

import warnings


            

Reported by Pylint.

standard import "import warnings" should be placed before "from torch import nn"
Error

Line: 8 Column: 1

              
from torch.testing._internal.common_utils import TestCase

import warnings

class ImplementedScheduler(BaseScheduler):
    def get_sl(self):
        if self.last_epoch > 0:
            return [group['sparsity_level'] * 0.5

            

Reported by Pylint.

Missing class docstring
Error

Line: 10 Column: 1

              
import warnings

class ImplementedScheduler(BaseScheduler):
    def get_sl(self):
        if self.last_epoch > 0:
            return [group['sparsity_level'] * 0.5
                    for group in self.sparsifier.module_groups]
        else:

            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 10 Column: 1

              
import warnings

class ImplementedScheduler(BaseScheduler):
    def get_sl(self):
        if self.last_epoch > 0:
            return [group['sparsity_level'] * 0.5
                    for group in self.sparsifier.module_groups]
        else:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 11 Column: 5

              import warnings

class ImplementedScheduler(BaseScheduler):
    def get_sl(self):
        if self.last_epoch > 0:
            return [group['sparsity_level'] * 0.5
                    for group in self.sparsifier.module_groups]
        else:
            return list(self.base_sl)

            

Reported by Pylint.

tools/autograd/gen_autograd_functions.py
29 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              #  Functions.h/cpp: subclasses of autograd::Node
#  python_functions.h/cpp: Python bindings for the above classes
#
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,

            

Reported by Pylint.

TODO: This is probably not exhaustive, but it's a start
Error

Line: 292 Column: 3

              # VIEW_FUNCTIONS are not traceable because they use as_strided, which
# has an untraceable backwards, see
# https://github.com/pytorch/pytorch/issues/4250
# TODO: This is probably not exhaustive, but it's a start
UNTRACEABLE_FUNCTIONS = VIEW_FUNCTIONS

def gen_autograd_functions_lib(
    out: str,
    differentiability_infos: Sequence[DifferentiabilityInfo],

            

Reported by Pylint.

Cell variable fname defined in loop
Error

Line: 316 Column: 79

                  for suffix in ['.h', '.cpp']:
        fname = file_basename + suffix
        fm.write_with_template(fname, fname, lambda: {
            'generated_comment': '@' + f'generated from {fm.template_dir}/' + fname,
            'autograd_function_declarations': declarations,
            'autograd_function_definitions': definitions,
        })

def gen_autograd_functions_python(

            

Reported by Pylint.

Redefining built-in 'type'
Error

Line: 377 Column: 9

              
    def save_var(var: SavedAttribute, is_output: bool) -> None:
        name = var.nctype.name
        type = var.nctype.type
        should_append_getsetdef = True
        should_append_raw_getsetdef = False

        if type == BaseCType(tensorT) or type == OptionalCType(BaseCType(tensorT)) or \
                type == MutRefCType(OptionalCType(BaseCType(tensorT))) or \

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # Generates C++ autograd functions for the derivatives of ATen operations
#
# This writes two files:
#  Functions.h/cpp: subclasses of autograd::Node
#  python_functions.h/cpp: Python bindings for the above classes
#
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

            

Reported by Pylint.

standard import "from typing import List, Sequence, Tuple" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 9 Column: 1

              #
from .gen_inplace_or_view_type import VIEW_FUNCTIONS

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,

            

Reported by Pylint.

third party import "from tools.codegen.api.autograd import Derivative, DifferentiabilityInfo, SavedAttribute, uses_retain_variables, uses_single_grad" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 11 Column: 1

              
from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)

            

Reported by Pylint.

third party import "from tools.codegen.api.types import Binding, BaseCType, OptionalCType, tensorT, intT, doubleT, scalarT, stringT, boolT, intArrayRefT, tensorListT, MutRefCType, ListCType, ArrayRefCType" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 14 Column: 1

              from tools.codegen.api.autograd import (Derivative, DifferentiabilityInfo,
                                        SavedAttribute, uses_retain_variables,
                                        uses_single_grad)
from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

            

Reported by Pylint.

third party import "from tools.codegen.code_template import CodeTemplate" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 17 Column: 1

              from tools.codegen.api.types import (Binding, BaseCType, OptionalCType, tensorT, intT,
                                     doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

FUNCTION_DECLARATION = CodeTemplate("""\
struct TORCH_API ${op} : public ${superclass} {

            

Reported by Pylint.

third party import "from tools.codegen.gen import FileManager" should be placed before "from .gen_inplace_or_view_type import VIEW_FUNCTIONS"
Error

Line: 18 Column: 1

                                                   doubleT, scalarT, stringT, boolT, intArrayRefT,
                                     tensorListT, MutRefCType, ListCType, ArrayRefCType)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.gen import FileManager
from tools.codegen.model import Argument

FUNCTION_DECLARATION = CodeTemplate("""\
struct TORCH_API ${op} : public ${superclass} {
  using ${superclass}::${superclass};

            

Reported by Pylint.