The following issues were found:

benchmarks/operator_benchmark/pt/stack_test.py
21 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
import torch
import random
from typing import List


"""Microbenchmarks for Stack operator"""

# Configs for PT stack operator

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 10 Column: 32

              """Microbenchmarks for Stack operator"""

# Configs for PT stack operator
stack_configs_static_runtime = op_bench.config_list(
    attr_names=['sizes', 'N'],
    attrs=[
        [(20, 40), 5],
        [(1, 40), 5],
    ],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 23 Column: 23

                  tags=['static_runtime'],
)

stack_configs_short = op_bench.config_list(
    attr_names=['sizes', 'N'],
    attrs=[
        [(1,    1,      1), 2],  # noqa: E241
        [(512,  512,    2), 2],  # noqa: E241
        [(128, 1024,    2), 2],  # noqa: E241

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 37 Column: 22

                  tags=['short'],
)

stack_configs_long = op_bench.config_list(
    attr_names=['sizes', 'N'],
    attrs=[
        [(2**10,    2**10,      2), 2],  # noqa: E241
        [(2**10+1,  2**10-1,    2), 2],  # noqa: E226,E241
        [(2**10,    2**10,      2), 2],  # noqa: E241

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 52 Column: 26

              )

# There is a different codepath on CUDA for >4 dimensions
stack_configs_multidim = op_bench.config_list(
    attr_names=['sizes', 'N'],
    attrs=[
        [(2**6,     2**5,   2**2,   2**4,   2**5), 2],  # noqa: E241
        [(2**4,     2**5,   2**2,   2**4,   2**5), 8],  # noqa: E241
        [(2**3+1,   2**5-1, 2**2+1, 2**4-1, 2**5+1), 17],  # noqa: E226,E241

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 66 Column: 22

                  tags=['multidim'],
)

class StackBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, sizes, N, dim, device):
        random.seed(42)
        inputs = []
        gen_sizes = []
        if type(sizes) == list and N == -1:

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_test' member
Error

Line: 91 Column: 1

                      return torch.stack(inputs, dim=dim, out=result)


op_bench.generate_pt_test(stack_configs_static_runtime +
                          stack_configs_short +
                          stack_configs_long +
                          stack_configs_multidim,
                          StackBenchmark)


            

Reported by Pylint.

String statement has no effect
Error

Line: 7 Column: 1

              from typing import List


"""Microbenchmarks for Stack operator"""

# Configs for PT stack operator
stack_configs_static_runtime = op_bench.config_list(
    attr_names=['sizes', 'N'],
    attrs=[

            

Reported by Pylint.

Unused variable 'i'
Error

Line: 74 Column: 17

                      if type(sizes) == list and N == -1:
            gen_sizes = sizes
        else:
            for i in range(N):
                gen_sizes.append([old_size() if callable(old_size) else old_size for old_size in sizes])

        for s in gen_sizes:
            inputs.append(torch.rand(s, device=device))
        result = torch.rand(gen_sizes[0], device=device)

            

Reported by Pylint.

Attribute 'inputs' defined outside __init__
Error

Line: 80 Column: 9

                      for s in gen_sizes:
            inputs.append(torch.rand(s, device=device))
        result = torch.rand(gen_sizes[0], device=device)
        self.inputs = {
            "result": result,
            "inputs": inputs,
            "dim": dim
        }
        self.set_module_name('stack')

            

Reported by Pylint.

benchmarks/operator_benchmark/benchmark_all_quantized_test.py
21 issues
Unable to import 'pt'
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qarithmetic_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qbatchnorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qcat_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qlayernorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qlinear_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qobserver_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qgroupnorm_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qpool_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

Unused qtensor_method_test imported from pt
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
from pt import (  # noqa: F401
    qactivation_test,
    qarithmetic_test,
    qbatchnorm_test,
    qcat_test,
    qcomparators_test,
    qconv_test,
    qgroupnorm_test,

            

Reported by Pylint.

benchmarks/sparse/utils.py
21 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
import functools
import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):

            

Reported by Pylint.

Attribute 'time' defined outside __init__
Error

Line: 14 Column: 9

                      pass

    def record(self):
        self.time = time.perf_counter()

    def elapsed_time(self, end_event):
        assert isinstance(end_event, Event)
        return end_event.time - self.time


            

Reported by Pylint.

Unused variable 'n'
Error

Line: 36 Column: 9

                  dense = np.random.randn(*shape)
    values = []
    indices = [[], []]
    for n in range(nnz):
        row = random.randint(0, shape[0] - 1)
        col = random.randint(0, shape[1] - 1)
        indices[0].append(row)
        indices[1].append(col)
        values.append(dense[row, col])

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import functools
import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):

            

Reported by Pylint.

standard import "import functools" should be placed before "import torch"
Error

Line: 2 Column: 1

              import torch
import functools
import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):

            

Reported by Pylint.

standard import "import random" should be placed before "import torch"
Error

Line: 3 Column: 1

              import torch
import functools
import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):

            

Reported by Pylint.

standard import "import operator" should be placed before "import torch"
Error

Line: 4 Column: 1

              import torch
import functools
import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):

            

Reported by Pylint.

standard import "import time" should be placed before "import torch"
Error

Line: 6 Column: 1

              import random
import operator
import numpy as np
import time

# shim for torch.cuda.Event when running on cpu
class Event(object):
    def __init__(self, enable_timing):
        pass

            

Reported by Pylint.

Missing class docstring
Error

Line: 9 Column: 1

              import time

# shim for torch.cuda.Event when running on cpu
class Event(object):
    def __init__(self, enable_timing):
        pass

    def record(self):
        self.time = time.perf_counter()

            

Reported by Pylint.

Class 'Event' inherits from 'object'; this base can be safely removed in Python 3
Error

Line: 9 Column: 1

              import time

# shim for torch.cuda.Event when running on cpu
class Event(object):
    def __init__(self, enable_timing):
        pass

    def record(self):
        self.time = time.perf_counter()

            

Reported by Pylint.

caffe2/python/layers/adaptive_weight.py
21 issues
self.output_schema is not callable
Error

Line: 154 Column: 45

                      self.reg_func(net, reg)
        net.Mul([weight, x], weighted_x)
        net.Add([weighted_x, reg], weighted_x_add_reg)
        net.SumElements(weighted_x_add_reg, self.output_schema())
        if enable_diagnose:
            for i in range(self.num):
                net.Slice(weight, self.weight_i[i], starts=[i], ends=[i + 1])

    def add_ops(self, net):

            

Reported by Pylint.

String statement has no effect
Error

Line: 11 Column: 1

              from caffe2.python.regularizer import BoundedGradientProjection, LogBarrier


"""
Implementation of adaptive weighting: https://arxiv.org/pdf/1705.07115.pdf
"""


class AdaptiveWeight(ModelLayer):

            

Reported by Pylint.

Attribute 'mu' defined outside __init__
Error

Line: 87 Column: 9

                          "GivenTensorFill",
            {"values": values, "dtype": core.DataType.FLOAT},
        )
        self.mu = self.create_param(
            param_name="mu",
            shape=[self.num],
            initializer=initializer,
            optimizer=self.optimizer,
        )

            

Reported by Pylint.

Unused argument 'x'
Error

Line: 94 Column: 30

                          optimizer=self.optimizer,
        )

    def log_std_weight(self, x, net, weight):
        """
        min 1 / 2 / e^mu X + mu / 2
        """
        mu_neg = net.NextScopedBlob("mu_neg")
        net.Negative(self.mu, mu_neg)

            

Reported by Pylint.

Attribute 'k' defined outside __init__
Error

Line: 128 Column: 9

                                  self.pos_optim_method
                )
            )
        self.k = self.create_param(
            param_name="k",
            shape=[self.num],
            initializer=initializer,
            optimizer=self.optimizer,
            regularizer=regularizer,

            

Reported by Pylint.

Unused argument 'x'
Error

Line: 136 Column: 30

                          regularizer=regularizer,
        )

    def inv_var_weight(self, x, net, weight):
        net.Scale(self.k, weight, scale=0.5)

    def inv_var_reg(self, net, reg):
        log_k = net.NextScopedBlob("log_k")
        net.Log(self.k, log_k)

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # @package adaptive_weight
# Module caffe2.fb.python.layers.adaptive_weight


import numpy as np
from caffe2.python import core, schema
from caffe2.python.layers.layers import ModelLayer
from caffe2.python.regularizer import BoundedGradientProjection, LogBarrier


            

Reported by Pylint.

Too many instance attributes (15/7)
Error

Line: 16 Column: 1

              """


class AdaptiveWeight(ModelLayer):
    def __init__(
        self,
        model,
        input_record,
        name="adaptive_weight",

            

Reported by Pylint.

Missing class docstring
Error

Line: 16 Column: 1

              """


class AdaptiveWeight(ModelLayer):
    def __init__(
        self,
        model,
        input_record,
        name="adaptive_weight",

            

Reported by Pylint.

Too many arguments (10/5)
Error

Line: 17 Column: 5

              

class AdaptiveWeight(ModelLayer):
    def __init__(
        self,
        model,
        input_record,
        name="adaptive_weight",
        optimizer=None,

            

Reported by Pylint.

caffe2/python/lengths_reducer_rowwise_8bit_ops_test.py
21 issues
Missing module docstring
Error

Line: 1 Column: 1

              




from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu

import numpy as np

            

Reported by Pylint.

Function name "FakeQuantization8BitsRowwise" doesn't conform to snake_case naming style
Error

Line: 12 Column: 1

              import numpy as np


def FakeQuantization8BitsRowwise(data):
    min_el = np.min(data, axis=1)
    max_el = np.max(data, axis=1)
    scale = (max_el - min_el) / 255.
    bias = min_el
    inv_scale = 1. / scale

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 12 Column: 1

              import numpy as np


def FakeQuantization8BitsRowwise(data):
    min_el = np.min(data, axis=1)
    max_el = np.max(data, axis=1)
    scale = (max_el - min_el) / 255.
    bias = min_el
    inv_scale = 1. / scale

            

Reported by Pylint.

Missing class docstring
Error

Line: 23 Column: 1

                  return data.T


class TestQuantize8bits(hu.HypothesisTestCase):

    def test_quantize_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],

            

Reported by Pylint.

Method could be a function
Error

Line: 25 Column: 5

              
class TestQuantize8bits(hu.HypothesisTestCase):

    def test_quantize_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 25 Column: 5

              
class TestQuantize8bits(hu.HypothesisTestCase):

    def test_quantize_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],

            

Reported by Pylint.

Variable name "op" doesn't conform to snake_case naming style
Error

Line: 26 Column: 9

              class TestQuantize8bits(hu.HypothesisTestCase):

    def test_quantize_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],
                                            [5., 11., 9., -2.]]))

            

Reported by Pylint.

Method could be a function
Error

Line: 44 Column: 5

                      np.testing.assert_array_almost_equal(
            result, ground_truth)

    def test_quantize_tensor_with_const_row_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 44 Column: 5

                      np.testing.assert_array_almost_equal(
            result, ground_truth)

    def test_quantize_tensor_with_const_row_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],

            

Reported by Pylint.

Variable name "op" doesn't conform to snake_case naming style
Error

Line: 45 Column: 9

                          result, ground_truth)

    def test_quantize_tensor_with_const_row_op(self):
        op = core.CreateOperator(
            'FloatToRowwiseQuantized8Bits',
            ['input_data'],
            ['quantized_input', 'scale_bias'])
        input_data = np.float32(np.asarray([[801., 786, 235.2, 2353.3434],
                                            [9., 9., 9., 9.]]))

            

Reported by Pylint.

benchmarks/operator_benchmark/pt/remainder_test.py
21 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              import operator_benchmark as op_bench
import torch


"""Microbenchmarks for remainder operators."""


# Benchmark ops performance with broadcast
remainder_ops_list = op_bench.op_list(

            

Reported by Pylint.

Module 'operator_benchmark' has no 'op_list' member
Error

Line: 9 Column: 22

              

# Benchmark ops performance with broadcast
remainder_ops_list = op_bench.op_list(
    attr_names=['op_name', 'op_func'],
    attrs=[
        ['fmod', torch.fmod],
        ['remainder', torch.remainder],
    ],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 17 Column: 27

                  ],
)

remainder_short_configs = op_bench.config_list(
    attr_names=['M', 'N', 'K'],
    attrs=[
        [1, 1, 1],
        [64, 64, 64],
        [64, 64, 128],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'cross_product_configs' member
Error

Line: 31 Column: 26

                  tags=['short'],
)

remainder_long_configs = op_bench.cross_product_configs(
    M=[8, 128],
    N=[32, 64],
    K=[256, 512],
    device=['cpu', 'cuda'],
    dtype=[torch.int32, torch.float, torch.double],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 41 Column: 28

              )


class RemainderOpBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, M, N, K, device, dtype, op_func):
        self.dividend = torch.rand(M, N, K, device=device)
        self.dividend = (self.dividend * 1000 - 500).to(dtype=dtype)

        self.divisor = torch.rand(M, N, K, device=device)

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_tests_from_op_list' member
Error

Line: 61 Column: 1

                      return self.op_func(dividend, divisor)


op_bench.generate_pt_tests_from_op_list(remainder_ops_list,
                                        remainder_short_configs + remainder_long_configs,
                                        RemainderOpBenchmark)


if __name__ == "__main__":

            

Reported by Pylint.

String statement has no effect
Error

Line: 5 Column: 1

              import torch


"""Microbenchmarks for remainder operators."""


# Benchmark ops performance with broadcast
remainder_ops_list = op_bench.op_list(
    attr_names=['op_name', 'op_func'],

            

Reported by Pylint.

Attribute 'dividend' defined outside __init__
Error

Line: 43 Column: 9

              
class RemainderOpBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, M, N, K, device, dtype, op_func):
        self.dividend = torch.rand(M, N, K, device=device)
        self.dividend = (self.dividend * 1000 - 500).to(dtype=dtype)

        self.divisor = torch.rand(M, N, K, device=device)
        # +1 so we don't divide by zero
        self.divisor = (self.divisor * 40 + 1).to(dtype=dtype)

            

Reported by Pylint.

Attribute 'dividend' defined outside __init__
Error

Line: 44 Column: 9

              class RemainderOpBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, M, N, K, device, dtype, op_func):
        self.dividend = torch.rand(M, N, K, device=device)
        self.dividend = (self.dividend * 1000 - 500).to(dtype=dtype)

        self.divisor = torch.rand(M, N, K, device=device)
        # +1 so we don't divide by zero
        self.divisor = (self.divisor * 40 + 1).to(dtype=dtype)


            

Reported by Pylint.

Attribute 'divisor' defined outside __init__
Error

Line: 46 Column: 9

                      self.dividend = torch.rand(M, N, K, device=device)
        self.dividend = (self.dividend * 1000 - 500).to(dtype=dtype)

        self.divisor = torch.rand(M, N, K, device=device)
        # +1 so we don't divide by zero
        self.divisor = (self.divisor * 40 + 1).to(dtype=dtype)

        self.inputs = {
            "dividend": self.dividend,

            

Reported by Pylint.

torch/nn/utils/weight_norm.py
21 issues
No name 'norm_except_dim' in module 'torch'
Error

Line: 5 Column: 1

              Weight Normalization from https://arxiv.org/abs/1602.07868
"""
from torch.nn.parameter import Parameter, UninitializedParameter
from torch import _weight_norm, norm_except_dim
from typing import Any, TypeVar
from ..modules import Module


class WeightNorm(object):

            

Reported by Pylint.

No name '_weight_norm' in module 'torch'
Error

Line: 5 Column: 1

              Weight Normalization from https://arxiv.org/abs/1602.07868
"""
from torch.nn.parameter import Parameter, UninitializedParameter
from torch import _weight_norm, norm_except_dim
from typing import Any, TypeVar
from ..modules import Module


class WeightNorm(object):

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              from torch.nn.parameter import Parameter, UninitializedParameter
from torch import _weight_norm, norm_except_dim
from typing import Any, TypeVar
from ..modules import Module


class WeightNorm(object):
    name: str
    dim: int

            

Reported by Pylint.

TODO Make return type more specific
Error

Line: 20 Column: 3

                      self.name = name
        self.dim = dim

    # TODO Make return type more specific
    def compute_weight(self, module: Module) -> Any:
        g = getattr(module, self.name + '_g')
        v = getattr(module, self.name + '_v')
        return _weight_norm(v, g, self.dim)


            

Reported by Pylint.

Unused variable 'k'
Error

Line: 28 Column: 13

              
    @staticmethod
    def apply(module, name: str, dim: int) -> 'WeightNorm':
        for k, hook in module._forward_pre_hooks.items():
            if isinstance(hook, WeightNorm) and hook.name == name:
                raise RuntimeError("Cannot register two weight_norm hooks on "
                                   "the same parameter {}".format(name))

        if dim is None:

            

Reported by Pylint.

Access to a protected member _forward_pre_hooks of a client class
Error

Line: 28 Column: 24

              
    @staticmethod
    def apply(module, name: str, dim: int) -> 'WeightNorm':
        for k, hook in module._forward_pre_hooks.items():
            if isinstance(hook, WeightNorm) and hook.name == name:
                raise RuntimeError("Cannot register two weight_norm hooks on "
                                   "the same parameter {}".format(name))

        if dim is None:

            

Reported by Pylint.

Access to a protected member _parameters of a client class
Error

Line: 44 Column: 13

                              'The module passed to `WeightNorm` can\'t have uninitialized parameters. '
                'Make sure to run the dummy forward before applying weight normalization')
        # remove w from parameter list
        del module._parameters[name]

        # add g and v as new parameters and express w as g/||v|| * v
        module.register_parameter(name + '_g', Parameter(norm_except_dim(weight, 2, dim).data))
        module.register_parameter(name + '_v', Parameter(weight.data))
        setattr(module, name, fn.compute_weight(module))

            

Reported by Pylint.

Access to a protected member _parameters of a client class
Error

Line: 59 Column: 13

                  def remove(self, module: Module) -> None:
        weight = self.compute_weight(module)
        delattr(module, self.name)
        del module._parameters[self.name + '_g']
        del module._parameters[self.name + '_v']
        setattr(module, self.name, Parameter(weight.data))

    def __call__(self, module: Module, inputs: Any) -> None:
        setattr(module, self.name, self.compute_weight(module))

            

Reported by Pylint.

Access to a protected member _parameters of a client class
Error

Line: 60 Column: 13

                      weight = self.compute_weight(module)
        delattr(module, self.name)
        del module._parameters[self.name + '_g']
        del module._parameters[self.name + '_v']
        setattr(module, self.name, Parameter(weight.data))

    def __call__(self, module: Module, inputs: Any) -> None:
        setattr(module, self.name, self.compute_weight(module))


            

Reported by Pylint.

Access to a protected member _forward_pre_hooks of a client class
Error

Line: 123 Column: 20

                      >>> m = weight_norm(nn.Linear(20, 40))
        >>> remove_weight_norm(m)
    """
    for k, hook in module._forward_pre_hooks.items():
        if isinstance(hook, WeightNorm) and hook.name == name:
            hook.remove(module)
            del module._forward_pre_hooks[k]
            return module


            

Reported by Pylint.

torch/multiprocessing/spawn.py
21 issues
Attempted relative import beyond top-level package
Error

Line: 9 Column: 1

              import sys
import warnings

from . import _prctl_pr_set_pdeathsig  # type: ignore[attr-defined]


class ProcessException(Exception):
    __slots__ = ["error_index", "error_pid"]


            

Reported by Pylint.

Module 'signal' has no 'Signals' member
Error

Line: 129 Column: 24

                      if self.error_queues[error_index].empty():
            exitcode = self.processes[error_index].exitcode
            if exitcode < 0:
                name = signal.Signals(-exitcode).name
                raise ProcessExitedException(
                    "process %d terminated with signal %s" %
                    (error_index, name),
                    error_index=error_index,
                    error_pid=failed_process.pid,

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 26 Column: 5

                  Exception is thrown when the process failed due to exception
    raised by the code.
    """
    def __init__(
        self,
        msg: str,
        error_index: int,
        error_pid: int,
    ):

            

Reported by Pylint.

Catching too general exception Exception
Error

Line: 62 Column: 12

                      fn(i, *args)
    except KeyboardInterrupt:
        pass  # SIGINT; Killed by parent, do nothing
    except Exception:
        # Propagate exception to parent process, keeping original traceback
        import traceback
        error_queue.put(traceback.format_exc())
        sys.exit(1)


            

Reported by Pylint.

Unnecessary pass statement
Error

Line: 157 Column: 5

                  def __init__(self, processes, error_queues):
        warnings.warn('SpawnContext is renamed to ProcessContext since 1.4 release.')
        super(SpawnContext, self).__init__(processes, error_queues)
    pass


# Note: [start_processes]
# mp.start_processes handles both start_method='spawn' and 'fork'. It's supposed to be a
# more generalized API than mp.spawn. Currently we only document mp.spawn as it's the

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              
from typing import Optional
import multiprocessing
import multiprocessing.connection
import signal
import sys
import warnings

from . import _prctl_pr_set_pdeathsig  # type: ignore[attr-defined]

            

Reported by Pylint.

Missing class docstring
Error

Line: 12 Column: 1

              from . import _prctl_pr_set_pdeathsig  # type: ignore[attr-defined]


class ProcessException(Exception):
    __slots__ = ["error_index", "error_pid"]

    def __init__(self, msg: str, error_index: int, pid: int):
        super().__init__(msg)
        self.error_index = error_index

            

Reported by Pylint.

Too many arguments (6/5)
Error

Line: 42 Column: 5

                  """
    __slots__ = ["exit_code"]

    def __init__(
            self, msg: str, error_index: int, error_pid: int,
            exit_code: int, signal_name: Optional[str] = None
    ):
        super().__init__(msg, error_index, error_pid)
        self.exit_code = exit_code

            

Reported by Pylint.

Argument name "fn" doesn't conform to snake_case naming style
Error

Line: 51 Column: 1

                      self.signal_name = signal_name


def _wrap(fn, i, args, error_queue):
    # prctl(2) is a Linux specific system call.
    # On other systems the following function call has no effect.
    # This is set to ensure that non-daemonic child processes can
    # terminate if their parent terminates before they do.
    _prctl_pr_set_pdeathsig(signal.SIGINT)

            

Reported by Pylint.

Import outside toplevel (traceback)
Error

Line: 64 Column: 9

                      pass  # SIGINT; Killed by parent, do nothing
    except Exception:
        # Propagate exception to parent process, keeping original traceback
        import traceback
        error_queue.put(traceback.format_exc())
        sys.exit(1)


class ProcessContext:

            

Reported by Pylint.

torch/fx/experimental/rewriter.py
21 issues
Use of exec detected.
Security

Line: 39
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b102_exec_used.html

                      code = compile(dest_ast, "", "exec")
        globals_dict = copy.copy(fn.__globals__)
        keys_before = set(globals_dict.keys())
        exec(code, globals_dict)
        new_keys = list(set(globals_dict.keys()) - keys_before)
        assert len(new_keys) == 1
        fn_compiled = globals_dict[new_keys[0]]

        # Return the correct FunctionType object

            

Reported by Bandit.

Use of exec
Error

Line: 39 Column: 9

                      code = compile(dest_ast, "", "exec")
        globals_dict = copy.copy(fn.__globals__)
        keys_before = set(globals_dict.keys())
        exec(code, globals_dict)
        new_keys = list(set(globals_dict.keys()) - keys_before)
        assert len(new_keys) == 1
        fn_compiled = globals_dict[new_keys[0]]

        # Return the correct FunctionType object

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import ast
import inspect
import textwrap
import copy
from types import FunctionType
from typing import cast, Union, Callable, Dict, Optional, Any
from torch.fx._symbolic_trace import Tracer
from torch.fx.graph import Graph
from torch._sources import normalize_source_lines

            

Reported by Pylint.

Class name "AST_Rewriter" doesn't conform to PascalCase naming style
Error

Line: 12 Column: 1

              from torch._sources import normalize_source_lines
import torch

class AST_Rewriter(ast.NodeTransformer):
    """
    Take a FunctionType object representing a `forward` method, then
    perform an AST rewrite to swap out nodes that are not symbolically
    traceable with a callsite to the FX alternative.


            

Reported by Pylint.

Argument name "fn" doesn't conform to snake_case naming style
Error

Line: 23 Column: 5

                  https://docs.python.org/3/library/ast.html#ast.NodeTransformer
    """

    def rewrite(self, fn: FunctionType):

        # Normalize the source lines
        sourcelines, _ = inspect.getsourcelines(fn)
        sourcelines = normalize_source_lines(sourcelines)
        source = ''.join(sourcelines)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 23 Column: 5

                  https://docs.python.org/3/library/ast.html#ast.NodeTransformer
    """

    def rewrite(self, fn: FunctionType):

        # Normalize the source lines
        sourcelines, _ = inspect.getsourcelines(fn)
        sourcelines = normalize_source_lines(sourcelines)
        source = ''.join(sourcelines)

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 41
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

                      keys_before = set(globals_dict.keys())
        exec(code, globals_dict)
        new_keys = list(set(globals_dict.keys()) - keys_before)
        assert len(new_keys) == 1
        fn_compiled = globals_dict[new_keys[0]]

        # Return the correct FunctionType object
        return fn_compiled


            

Reported by Bandit.

Method name "visit_Assert" doesn't conform to snake_case naming style
Error

Line: 47 Column: 5

                      # Return the correct FunctionType object
        return fn_compiled

    def visit_Assert(self, node):
        """
        Swap out the Assert node (Python's `assert`) with a callsite to the
        symbolically-traceable torch._assert function
        """
        # Create the Call node

            

Reported by Pylint.

Method could be a function
Error

Line: 47 Column: 5

                      # Return the correct FunctionType object
        return fn_compiled

    def visit_Assert(self, node):
        """
        Swap out the Assert node (Python's `assert`) with a callsite to the
        symbolically-traceable torch._assert function
        """
        # Create the Call node

            

Reported by Pylint.

Variable name "n" doesn't conform to snake_case naming style
Error

Line: 53 Column: 9

                      symbolically-traceable torch._assert function
        """
        # Create the Call node
        n = ast.parse('torch._assert()', mode='eval')
        assert isinstance(n, ast.Expression)
        call_node = n.body
        assert isinstance(call_node, ast.Call)
        msg = node.msg if node.msg else ast.Constant(value="", kind=None)
        call_node.args = [node.test, msg]

            

Reported by Pylint.

torch/optim/optimizer.py
21 issues
Missing module docstring
Error

Line: 1 Column: 1

              from collections import defaultdict, abc as container_abcs

import torch
from copy import deepcopy
from itertools import chain
import warnings
import functools



            

Reported by Pylint.

standard import "from copy import deepcopy" should be placed before "import torch"
Error

Line: 4 Column: 1

              from collections import defaultdict, abc as container_abcs

import torch
from copy import deepcopy
from itertools import chain
import warnings
import functools



            

Reported by Pylint.

standard import "from itertools import chain" should be placed before "import torch"
Error

Line: 5 Column: 1

              
import torch
from copy import deepcopy
from itertools import chain
import warnings
import functools


class _RequiredParameter(object):

            

Reported by Pylint.

standard import "import warnings" should be placed before "import torch"
Error

Line: 6 Column: 1

              import torch
from copy import deepcopy
from itertools import chain
import warnings
import functools


class _RequiredParameter(object):
    """Singleton class representing a required parameter for an Optimizer."""

            

Reported by Pylint.

standard import "import functools" should be placed before "import torch"
Error

Line: 7 Column: 1

              from copy import deepcopy
from itertools import chain
import warnings
import functools


class _RequiredParameter(object):
    """Singleton class representing a required parameter for an Optimizer."""
    def __repr__(self):

            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 10 Column: 1

              import functools


class _RequiredParameter(object):
    """Singleton class representing a required parameter for an Optimizer."""
    def __repr__(self):
        return "<required parameter>"

required = _RequiredParameter()

            

Reported by Pylint.

Class '_RequiredParameter' inherits from object, can be safely removed from bases in python3
Error

Line: 10 Column: 1

              import functools


class _RequiredParameter(object):
    """Singleton class representing a required parameter for an Optimizer."""
    def __repr__(self):
        return "<required parameter>"

required = _RequiredParameter()

            

Reported by Pylint.

Class 'Optimizer' inherits from object, can be safely removed from bases in python3
Error

Line: 18 Column: 1

              required = _RequiredParameter()


class Optimizer(object):
    r"""Base class for all optimizers.

    .. warning::
        Parameters need to be specified as collections that have a deterministic
        ordering that is consistent between runs. Examples of objects that don't

            

Reported by Pylint.

Line too long (105/100)
Error

Line: 79 Column: 1

                      return format_string

    def _hook_for_profile(self):
        self._zero_grad_profile_name = "Optimizer.zero_grad#{}.zero_grad".format(self.__class__.__name__)

        def profile_hook_step(func):

            @functools.wraps(func)
            def wrapper(*args, **kwargs):

            

Reported by Pylint.

Unnecessary use of a comprehension
Error

Line: 149 Column: 1

                                           "that doesn't match the size of optimizer's group")

        # Update the state
        id_map = {old_id: p for old_id, p in
                  zip(chain.from_iterable((g['params'] for g in saved_groups)),
                      chain.from_iterable((g['params'] for g in groups)))}

        def cast(param, value):
            r"""Make a deep copy of value, casting all tensors to device of param."""

            

Reported by Pylint.