The following issues were found:

test/distributed/pipeline/sync/test_inplace.py
25 issues
Unable to import 'pytest'
Error

Line: 7 Column: 1

              #
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 8 Column: 1

              # This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe



            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 9 Column: 1

              # LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):

            

Reported by Pylint.

Unable to import 'torch.distributed.pipeline.sync'
Error

Line: 11 Column: 1

              import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

            

Reported by Pylint.

Unused argument 'setup_rpc'
Error

Line: 14 Column: 35

              from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

            

Reported by Pylint.

Unused argument 'setup_rpc'
Error

Line: 27 Column: 39

              

@pytest.mark.xfail(strict=True)
def test_inplace_on_not_requires_grad(setup_rpc):
    # In-place operation on a tensor not requiring grad doesn't cause a
    # RuntimeError. Currently, we cannot detect this case.
    model = nn.Sequential(nn.ReLU(inplace=True))
    model = Pipe(model, [1], devices=["cpu"], checkpoint="always")


            

Reported by Pylint.

Unused argument 'setup_rpc'
Error

Line: 43 Column: 33

              

@pytest.mark.xfail(strict=True)
def test_inplace_incorrect_grad(setup_rpc):
    class M(nn.Module):
        def forward(self, foo_bar):
            # 'foo' requires grad but 'bar' does not. In-place operation on
            # 'bar' won't cause a RuntimeError.
            foo, bar = foo_bar

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # Copyright 2019 Kakao Brain
#
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 14 Column: 1

              from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

            

Reported by Pylint.

Variable name "x" doesn't conform to snake_case naming style
Error

Line: 18 Column: 5

                  model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

    message = r"a leaf Variable that requires grad .* used in an in-place operation."
    with pytest.raises(RuntimeError, match=message):
        y.backward()

            

Reported by Pylint.

caffe2/python/recurrent.py
25 issues
Unused variable 'ssa'
Error

Line: 106 Column: 9

              
        # also add to the output list the intermediate outputs of fwd_step that
        # are used by backward.
        ssa, blob_versions = core.get_ssa(cell_net.Proto())
        scratches = [
            blob
            for blob, ver in viewitems(blob_versions)
            if (ver > 0 and
                blob in undefined and

            

Reported by Pylint.

Unused variable 'map_to_dual_list'
Error

Line: 230 Column: 5

                                      [output_blob],
                    )

    def map_to_dual_list(m):
        return [str(x) for x in list(m.keys())] + \
               [str(x) for x in list(m.values())]

    backward_args = {}
    if backward_cell_net is not None:

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              ## @package recurrent
# Module caffe2.python.recurrent





from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys

            

Reported by Pylint.

Too many branches (22/12)
Error

Line: 11 Column: 1

              from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys

def recurrent_net(
        net, cell_net, inputs, initial_cell_inputs,
        links, timestep=None, scope=None, outputs_with_grads=(0,),
        recompute_blobs_on_backward=None, forward_only=False,
):
    '''

            

Reported by Pylint.

Too many arguments (10/5)
Error

Line: 11 Column: 1

              from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys

def recurrent_net(
        net, cell_net, inputs, initial_cell_inputs,
        links, timestep=None, scope=None, outputs_with_grads=(0,),
        recompute_blobs_on_backward=None, forward_only=False,
):
    '''

            

Reported by Pylint.

Too many statements (94/50)
Error

Line: 11 Column: 1

              from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys

def recurrent_net(
        net, cell_net, inputs, initial_cell_inputs,
        links, timestep=None, scope=None, outputs_with_grads=(0,),
        recompute_blobs_on_backward=None, forward_only=False,
):
    '''

            

Reported by Pylint.

Too many local variables (64/15)
Error

Line: 11 Column: 1

              from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys

def recurrent_net(
        net, cell_net, inputs, initial_cell_inputs,
        links, timestep=None, scope=None, outputs_with_grads=(0,),
        recompute_blobs_on_backward=None, forward_only=False,
):
    '''

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 49
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

              
    forward_only: if True, only forward steps are executed
    '''
    assert len(inputs) == 1, "Only one input blob is supported so far"

    input_blobs = [str(i[0]) for i in inputs]
    initial_input_blobs = [str(x[1]) for x in initial_cell_inputs]
    op_name = net.NextName('recurrent')


            

Reported by Bandit.

Function name "s" doesn't conform to snake_case naming style
Error

Line: 55 Column: 5

                  initial_input_blobs = [str(x[1]) for x in initial_cell_inputs]
    op_name = net.NextName('recurrent')

    def s(name):
        # We have to manually scope due to our internal/external blob
        # relationships.
        scope_name = op_name if scope is None else scope
        return "{}/{}".format(str(scope_name), str(name))


            

Reported by Pylint.

Variable name "op" doesn't conform to snake_case naming style
Error

Line: 91 Column: 17

                          recompute_blobs_on_backward = {str(b) for b in
                                           recompute_blobs_on_backward}

            for op in cell_net.Proto().op:
                if not recompute_blobs_on_backward.isdisjoint(set(op.output)):
                    backward_cell_net.Proto().op.extend([op])
                    # This fires if other outputs than the declared
                    # are computed by the ops that are recomputed
                    assert set(op.output).issubset(recompute_blobs_on_backward)

            

Reported by Pylint.

caffe2/python/operator_test/rebatching_queue_test.py
25 issues
Unable to import 'hypothesis'
Error

Line: 11 Column: 1

              import numpy as np
import numpy.testing as npt

from hypothesis import given, settings
import hypothesis.strategies as st

import functools



            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 12 Column: 1

              import numpy.testing as npt

from hypothesis import given, settings
import hypothesis.strategies as st

import functools


def primefac(n):

            

Reported by Pylint.

Using deprecated method assertEquals()
Error

Line: 54 Column: 13

                      workspace.RunNetOnce(net)

        for idx in range(3):
            self.assertEquals(workspace.FetchBlob(results[idx]), [1.0])

    def test_rebatching_queue_multi_enqueue_dequeue(self):
        net = core.Net('net')
        workspace.FeedBlob(
            "tensors", np.array([x for x in range(10)], np.int32)

            

Reported by Pylint.

Unused argument 'outs'
Error

Line: 220 Column: 25

                      ### Consumers ###
        outputs = []

        def append(ins, outs):
            # Extend is atomic
            outputs.extend(ins[0].data.tolist())

        consumer_steps = []
        for i in range(num_consumers):

            

Reported by Pylint.

Using deprecated method assertEquals()
Error

Line: 283 Column: 9

                      # We check that the outputs are a permutation of inputs
        inputs.sort()
        outputs.sort()
        self.assertEquals(inputs, outputs)


if __name__ == "__main__":
    import unittest
    unittest.main()

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              



from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase

import numpy as np
import numpy.testing as npt

            

Reported by Pylint.

standard import "import functools" should be placed before "from caffe2.python import core, workspace"
Error

Line: 14 Column: 1

              from hypothesis import given, settings
import hypothesis.strategies as st

import functools


def primefac(n):
    ret = []
    divisor = 2

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 17 Column: 1

              import functools


def primefac(n):
    ret = []
    divisor = 2
    while divisor * divisor <= n:
        while (n % divisor) == 0:
            ret.append(divisor)

            

Reported by Pylint.

Argument name "n" doesn't conform to snake_case naming style
Error

Line: 17 Column: 1

              import functools


def primefac(n):
    ret = []
    divisor = 2
    while divisor * divisor <= n:
        while (n % divisor) == 0:
            ret.append(divisor)

            

Reported by Pylint.

Missing class docstring
Error

Line: 30 Column: 1

                  return ret


class TestReBatchingQueue(TestCase):
    def test_rebatching_queue_single_enqueue_dequeue(self):
        net = core.Net('net')

        tensors = [
            net.ConstantFill([], 1, value=1.0, run_once=False)

            

Reported by Pylint.

torch/distributed/elastic/multiprocessing/errors/error_handler.py
25 issues
Catching too general exception Exception
Error

Line: 67 Column: 16

                      """
        try:
            faulthandler.enable(all_threads=True)
        except Exception as e:
            warnings.warn(f"Unable to enable fault handler. {type(e).__name__}: {e}")

    def _write_error_file(self, file_path: str, error_msg: str) -> None:
        """
        Writes error message to the file.

            

Reported by Pylint.

Catching too general exception Exception
Error

Line: 77 Column: 16

                      try:
            with open(file_path, "w") as fp:
                fp.write(error_msg)
        except Exception as e:
            warnings.warn(f"Unable to write error to file. {type(e).__name__}: {e}")

    def record_exception(self, e: BaseException) -> None:
        """
        Writes a structured information about the exception into an error file in

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 98 Column: 21

                          # is terminated by singals like SIGSEGV.
            if error_code:
                if "message" not in rootcause_error:
                    log.warning(
                        f"child error file ({rootcause_error_file}) does not have field `message`. \n"
                        f"cannot override error code: {error_code}"
                    )
                elif isinstance(rootcause_error["message"], str):
                    log.warning(

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 103 Column: 21

                                      f"cannot override error code: {error_code}"
                    )
                elif isinstance(rootcause_error["message"], str):
                    log.warning(
                        f"child error file ({rootcause_error_file}) has a new message format. \n"
                        f"skipping error code override"
                    )
                else:
                    rootcause_error["message"]["errorCode"] = error_code

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 110 Column: 13

                              else:
                    rootcause_error["message"]["errorCode"] = error_code

            log.info(
                f"child error file ({rootcause_error_file}) contents:\n"
                f"{json.dumps(rootcause_error, indent=2)}"
            )

        my_error_file = self._get_error_file_path()

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 129 Column: 13

                          # original error file contents and overwrite the error file.
            self._rm(my_error_file)
            self._write_error_file(my_error_file, json.dumps(rootcause_error))
            log.info(f"dumped error file to parent's {my_error_file}")
        else:
            log.error(
                f"no error file defined for parent, to copy child error file ({rootcause_error_file})"
            )


            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 131 Column: 13

                          self._write_error_file(my_error_file, json.dumps(rootcause_error))
            log.info(f"dumped error file to parent's {my_error_file}")
        else:
            log.error(
                f"no error file defined for parent, to copy child error file ({rootcause_error_file})"
            )

    def _rm(self, my_error_file):
        if os.path.isfile(my_error_file):

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 141 Column: 21

                          with open(my_error_file, "r") as fp:
                try:
                    original = json.dumps(json.load(fp), indent=2)
                    log.warning(
                        f"{my_error_file} already exists"
                        f" and will be overwritten."
                        f" Original contents:\n{original}"
                    )
                except json.decoder.JSONDecodeError as err:

            

Reported by Pylint.

Unused variable 'err'
Error

Line: 146 Column: 17

                                      f" and will be overwritten."
                        f" Original contents:\n{original}"
                    )
                except json.decoder.JSONDecodeError as err:
                    log.warning(
                        f"{my_error_file} already exists"
                        f" and will be overwritten."
                        f" Unable to load original contents:\n"
                    )

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 147 Column: 21

                                      f" Original contents:\n{original}"
                    )
                except json.decoder.JSONDecodeError as err:
                    log.warning(
                        f"{my_error_file} already exists"
                        f" and will be overwritten."
                        f" Unable to load original contents:\n"
                    )
            os.remove(my_error_file)

            

Reported by Pylint.

benchmarks/tensorexpr/matmul.py
25 issues
Attempted relative import beyond top-level package
Error

Line: 1 Column: 1

              from . import benchmark
import numpy as np


class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from . import benchmark
import numpy as np


class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M

            

Reported by Pylint.

third party import "import numpy as np" should be placed before "from . import benchmark"
Error

Line: 2 Column: 1

              from . import benchmark
import numpy as np


class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M

            

Reported by Pylint.

Missing class docstring
Error

Line: 5 Column: 1

              import numpy as np


class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N

            

Reported by Pylint.

Argument name "B" doesn't conform to snake_case naming style
Error

Line: 6 Column: 5

              

class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Too many arguments (8/5)
Error

Line: 6 Column: 5

              

class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Argument name "K" doesn't conform to snake_case naming style
Error

Line: 6 Column: 5

              

class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Argument name "N" doesn't conform to snake_case naming style
Error

Line: 6 Column: 5

              

class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Argument name "M" doesn't conform to snake_case naming style
Error

Line: 6 Column: 5

              

class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Attribute name "B" doesn't conform to snake_case naming style
Error

Line: 8 Column: 9

              class MatMulBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, B, M, N, K):
        super().__init__(mode, device, dtype)
        self.B = B
        self.M = M
        self.N = N
        self.K = K
        self.d1 = self.rand([B, M, N], device=device, dtype=dtype, requires_grad=self.requires_grad)
        self.d2 = self.rand([B, N, K], device=device, dtype=dtype, requires_grad=self.requires_grad)

            

Reported by Pylint.

benchmarks/tensorexpr/conv.py
25 issues
Attempted relative import beyond top-level package
Error

Line: 1 Column: 1

              from . import benchmark


class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from . import benchmark


class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N

            

Reported by Pylint.

Too many instance attributes (10/7)
Error

Line: 4 Column: 1

              from . import benchmark


class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N

            

Reported by Pylint.

Missing class docstring
Error

Line: 4 Column: 1

              from . import benchmark


class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N

            

Reported by Pylint.

Argument name "iC" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

Too many arguments (11/5)
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

Argument name "N" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

Argument name "oC" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

Argument name "W" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

Argument name "H" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ConvImplBench(benchmark.Benchmark):
    def __init__(self, case, mode, device, dtype, kernel_size, N, iC, H, W, oC):
        super().__init__(mode, device, dtype)
        self.case = case
        self.kernel_size = kernel_size
        self.N = N
        self.iC = iC

            

Reported by Pylint.

torch/ao/sparsity/experimental/pruner/base_pruner.py
25 issues
Attempted relative import beyond top-level package
Error

Line: 11 Column: 1

              
from torch.nn.modules.container import ModuleDict, ModuleList

from .parametrization import PruningParametrization, LinearActivationReconstruction, Conv2dActivationReconstruction

SUPPORTED_MODULES = {
    nn.Linear,
    nn.Conv2d
}

            

Reported by Pylint.

Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 135 Column: 48

                              module = config['module']

            if getattr(module, 'mask', None) is None:
                module.register_buffer('mask', torch.tensor(module.weight.shape[0]))
            param = config.get('parametrization', PruningParametrization)
            parametrize.register_parametrization(module, 'weight',
                                                 param(module.mask),
                                                 unsafe=True)


            

Reported by Pylint.

Unused variable 'name'
Error

Line: 76 Column: 21

                          stack = [model]
            while stack:
                module = stack.pop()
                for name, child in module.named_children():
                    if type(child) in SUPPORTED_MODULES:
                        self.config.append(child)
                    else:
                        stack.append(child)


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 117 Column: 33

                      format_string += ')'
        return format_string

    def bias_hook(self, module, input, output):
        if getattr(module, '_bias', None) is not None:
            idx = [1] * len(output.shape)
            idx[1] = output.shape[1]
            bias = module._bias.reshape(idx)
            output += bias

            

Reported by Pylint.

Unused argument 'input'
Error

Line: 117 Column: 33

                      format_string += ')'
        return format_string

    def bias_hook(self, module, input, output):
        if getattr(module, '_bias', None) is not None:
            idx = [1] * len(output.shape)
            idx[1] = output.shape[1]
            bias = module._bias.reshape(idx)
            output += bias

            

Reported by Pylint.

Access to a protected member _bias of a client class
Error

Line: 121 Column: 20

                      if getattr(module, '_bias', None) is not None:
            idx = [1] * len(output.shape)
            idx[1] = output.shape[1]
            bias = module._bias.reshape(idx)
            output += bias
        return output

    def prepare(self, use_path=False, *args, **kwargs):
        r"""Adds mask parametrization to the layer weight

            

Reported by Pylint.

Keyword argument before variable positional arguments list in the definition of prepare function
Error

Line: 125 Column: 5

                          output += bias
        return output

    def prepare(self, use_path=False, *args, **kwargs):
        r"""Adds mask parametrization to the layer weight
        """
        for config in self.module_groups:
            if use_path:
                module = _path_to_module(self.model, config['path'])

            

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 125 Column: 1

                          output += bias
        return output

    def prepare(self, use_path=False, *args, **kwargs):
        r"""Adds mask parametrization to the layer weight
        """
        for config in self.module_groups:
            if use_path:
                module = _path_to_module(self.model, config['path'])

            

Reported by Pylint.

Unused argument 'args'
Error

Line: 125 Column: 1

                          output += bias
        return output

    def prepare(self, use_path=False, *args, **kwargs):
        r"""Adds mask parametrization to the layer weight
        """
        for config in self.module_groups:
            if use_path:
                module = _path_to_module(self.model, config['path'])

            

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 159 Column: 1

                              module.bias = None
            self.bias_handles.append(module.register_forward_hook(self.bias_hook))

    def convert(self, use_path=False, *args, **kwargs):
        for config in self.module_groups:
            if use_path:
                module = _path_to_module(self.model, config['path'])
            else:
                module = config['module']

            

Reported by Pylint.

torch/nn/grad.py
25 issues
Attempted relative import beyond top-level package
Error

Line: 4 Column: 1

              """Gradient interface"""

import torch
from .modules.utils import _single, _pair, _triple
import warnings


def _grad_input_padding(grad_output, input_size, stride, padding, kernel_size, dilation=None):
    if dilation is None:

            

Reported by Pylint.

Module 'torch' has no 'conv_transpose1d' member
Error

Line: 76 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose1d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv1d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv1d' member
Error

Line: 118 Column: 19

                  input = input.contiguous().view(1, input.shape[0] * input.shape[1],
                                    input.shape[2])

    grad_weight = torch.conv1d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2])


            

Reported by Pylint.

Module 'torch' has no 'conv_transpose2d' member
Error

Line: 165 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose2d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv2d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv2d' member
Error

Line: 209 Column: 19

                  input = input.contiguous().view(1, input.shape[0] * input.shape[1],
                                    input.shape[2], input.shape[3])

    grad_weight = torch.conv2d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2],
        grad_weight.shape[3])

            

Reported by Pylint.

Module 'torch' has no 'conv_transpose3d' member
Error

Line: 258 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose3d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv3d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv3d' member
Error

Line: 302 Column: 19

                                                  input.shape[2], input.shape[3],
                                    input.shape[4])

    grad_weight = torch.conv3d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2],
        grad_weight.shape[3], grad_weight.shape[4])

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 81 Column: 19

                      dilation)


def conv1d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv1d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iW)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 170 Column: 19

                      dilation)


def conv2d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv2d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iH x iW)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 263 Column: 19

                      dilation)


def conv3d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv3d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iT x iH x iW)

            

Reported by Pylint.

torch/distributed/elastic/metrics/api.py
25 issues
Using the global statement
Error

Line: 69 Column: 9

              # pyre-fixme[9]: group has type `str`; used as `None`.
def configure(handler: MetricHandler, group: str = None):
    if group is None:
        global _default_metrics_handler
        # pyre-fixme[9]: _default_metrics_handler has type `NullMetricHandler`; used
        #  as `MetricHandler`.
        _default_metrics_handler = handler
    else:
        _metrics_map[group] = handler

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              #!/usr/bin/env python3

# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import abc

            

Reported by Pylint.

Missing class docstring
Error

Line: 19 Column: 1

              MetricData = namedtuple("MetricData", ["timestamp", "group_name", "name", "value"])


class MetricsConfig:
    __slots__ = ["params"]

    def __init__(self, params: Optional[Dict[str, str]] = None):
        self.params = params
        if self.params is None:

            

Reported by Pylint.

Too few public methods (0/2)
Error

Line: 19 Column: 1

              MetricData = namedtuple("MetricData", ["timestamp", "group_name", "name", "value"])


class MetricsConfig:
    __slots__ = ["params"]

    def __init__(self, params: Optional[Dict[str, str]] = None):
        self.params = params
        if self.params is None:

            

Reported by Pylint.

Missing class docstring
Error

Line: 28 Column: 1

                          self.params = {}


class MetricHandler(abc.ABC):
    @abc.abstractmethod
    def emit(self, metric_data: MetricData):
        pass



            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 28 Column: 1

                          self.params = {}


class MetricHandler(abc.ABC):
    @abc.abstractmethod
    def emit(self, metric_data: MetricData):
        pass



            

Reported by Pylint.

Missing function or method docstring
Error

Line: 30 Column: 5

              
class MetricHandler(abc.ABC):
    @abc.abstractmethod
    def emit(self, metric_data: MetricData):
        pass


class ConsoleMetricHandler(MetricHandler):
    def emit(self, metric_data: MetricData):

            

Reported by Pylint.

Missing class docstring
Error

Line: 34 Column: 1

                      pass


class ConsoleMetricHandler(MetricHandler):
    def emit(self, metric_data: MetricData):
        print(
            "[{}][{}]: {}={}".format(
                metric_data.timestamp,
                metric_data.group_name,

            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 34 Column: 1

                      pass


class ConsoleMetricHandler(MetricHandler):
    def emit(self, metric_data: MetricData):
        print(
            "[{}][{}]: {}={}".format(
                metric_data.timestamp,
                metric_data.group_name,

            

Reported by Pylint.

Missing class docstring
Error

Line: 46 Column: 1

                      )


class NullMetricHandler(MetricHandler):
    def emit(self, metric_data: MetricData):
        pass


class MetricStream:

            

Reported by Pylint.

test/test_set_default_mobile_cpu_allocator.py
25 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
from torch.testing._internal.common_utils import TestCase, run_tests

class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 2 Column: 1

              import torch
from torch.testing._internal.common_utils import TestCase, run_tests

class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):

            

Reported by Pylint.

Access to a protected member _set_default_mobile_cpu_allocator of a client class
Error

Line: 6 Column: 9

              
class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 6 Column: 9

              
class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()

            

Reported by Pylint.

Access to a protected member _unset_default_mobile_cpu_allocator of a client class
Error

Line: 7 Column: 9

              class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 7 Column: 9

              class TestSetDefaultMobileCPUAllocator(TestCase):
    def test_no_exception(self):
        torch._C._set_default_mobile_cpu_allocator()
        torch._C._unset_default_mobile_cpu_allocator()

    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 11 Column: 13

              
    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()

        with self.assertRaises(Exception):
            torch._C._set_default_mobile_cpu_allocator()
            torch._C._set_default_mobile_cpu_allocator()


            

Reported by Pylint.

Access to a protected member _unset_default_mobile_cpu_allocator of a client class
Error

Line: 11 Column: 13

              
    def test_exception(self):
        with self.assertRaises(Exception):
            torch._C._unset_default_mobile_cpu_allocator()

        with self.assertRaises(Exception):
            torch._C._set_default_mobile_cpu_allocator()
            torch._C._set_default_mobile_cpu_allocator()


            

Reported by Pylint.

Access to a protected member _set_default_mobile_cpu_allocator of a client class
Error

Line: 14 Column: 13

                          torch._C._unset_default_mobile_cpu_allocator()

        with self.assertRaises(Exception):
            torch._C._set_default_mobile_cpu_allocator()
            torch._C._set_default_mobile_cpu_allocator()

        # Must reset to good state
        # For next test.
        torch._C._unset_default_mobile_cpu_allocator()

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 14 Column: 13

                          torch._C._unset_default_mobile_cpu_allocator()

        with self.assertRaises(Exception):
            torch._C._set_default_mobile_cpu_allocator()
            torch._C._set_default_mobile_cpu_allocator()

        # Must reset to good state
        # For next test.
        torch._C._unset_default_mobile_cpu_allocator()

            

Reported by Pylint.