The following issues were found:

test/onnx/model_defs/rnn_model_with_packed_sequence.py
19 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              from torch import nn
from torch.nn.utils import rnn as rnn_utils


class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first

            

Reported by Pylint.

Unable to import 'torch.nn.utils'
Error

Line: 2 Column: 1

              from torch import nn
from torch.nn.utils import rnn as rnn_utils


class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 11 Column: 23

                      self.model = model
        self.batch_first = batch_first

    def forward(self, input, *args):
        args, seq_lengths = args[:-1], args[-1]
        input = rnn_utils.pack_padded_sequence(input, seq_lengths, self.batch_first)
        rets = self.model(input, *args)
        ret, rets = rets[0], rets[1:]
        ret, _ = rnn_utils.pad_packed_sequence(ret, self.batch_first)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 25 Column: 23

                      self.model = model
        self.batch_first = batch_first

    def forward(self, input, seq_lengths):
        input = rnn_utils.pack_padded_sequence(input, seq_lengths, self.batch_first)
        rets = self.model(input)
        ret, rets = rets[0], rets[1:]
        ret, _ = rnn_utils.pad_packed_sequence(ret, self.batch_first)
        return list([ret] + list(rets))

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 38 Column: 23

                      self.model = model
        self.batch_first = batch_first

    def forward(self, input, hx, seq_lengths):
        input = rnn_utils.pack_padded_sequence(input, seq_lengths, self.batch_first)
        rets = self.model(input, hx)
        ret, rets = rets[0], rets[1:]
        ret, _ = rnn_utils.pad_packed_sequence(ret, self.batch_first)
        return list([ret] + list(rets))

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from torch import nn
from torch.nn.utils import rnn as rnn_utils


class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first

            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 5 Column: 1

              from torch.nn.utils import rnn as rnn_utils


class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first


            

Reported by Pylint.

Missing class docstring
Error

Line: 5 Column: 1

              from torch.nn.utils import rnn as rnn_utils


class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first


            

Reported by Pylint.

Consider using Python 3 style super() without arguments
Error

Line: 7 Column: 9

              
class RnnModelWithPackedSequence(nn.Module):
    def __init__(self, model, batch_first):
        super(RnnModelWithPackedSequence, self).__init__()
        self.model = model
        self.batch_first = batch_first

    def forward(self, input, *args):
        args, seq_lengths = args[:-1], args[-1]

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 11 Column: 5

                      self.model = model
        self.batch_first = batch_first

    def forward(self, input, *args):
        args, seq_lengths = args[:-1], args[-1]
        input = rnn_utils.pack_padded_sequence(input, seq_lengths, self.batch_first)
        rets = self.model(input, *args)
        ret, rets = rets[0], rets[1:]
        ret, _ = rnn_utils.pad_packed_sequence(ret, self.batch_first)

            

Reported by Pylint.

caffe2/python/operator_test/numpy_tile_op_test.py
19 issues
Unable to import 'hypothesis'
Error

Line: 8 Column: 1

              
import numpy as np

from hypothesis import given, settings
import hypothesis.strategies as st
import unittest

from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 9 Column: 1

              import numpy as np

from hypothesis import given, settings
import hypothesis.strategies as st
import unittest

from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 22 Column: 47

                         seed=st.integers(min_value=0, max_value=65536),
           **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_numpy_tile(self, ndim, seed, gc, dc):
        np.random.seed(seed)

        input_dims = np.random.randint(1, 4, size=ndim)
        input = np.random.randn(*input_dims)
        repeats = np.random.randint(1, 5, size=ndim)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 26 Column: 9

                      np.random.seed(seed)

        input_dims = np.random.randint(1, 4, size=ndim)
        input = np.random.randn(*input_dims)
        repeats = np.random.randint(1, 5, size=ndim)

        op = core.CreateOperator(
            'NumpyTile', ['input', 'repeats'], 'out',
        )

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 33 Column: 22

                          'NumpyTile', ['input', 'repeats'], 'out',
        )

        def tile_ref(input, repeats):
            tiled_data = np.tile(input, repeats)
            return (tiled_data,)

        # Check against numpy reference
        self.assertReferenceChecks(gc, op, [input, repeats],

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 45 Column: 56

                         seed=st.integers(min_value=0, max_value=65536),
           **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_numpy_tile_zero_dim(self, ndim, seed, gc, dc):
        np.random.seed(seed)

        input_dims = np.random.randint(0, 4, size=ndim)
        input = np.random.randn(*input_dims)
        repeats = np.random.randint(0, 5, size=ndim)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 49 Column: 9

                      np.random.seed(seed)

        input_dims = np.random.randint(0, 4, size=ndim)
        input = np.random.randn(*input_dims)
        repeats = np.random.randint(0, 5, size=ndim)

        op = core.CreateOperator(
            'NumpyTile', ['input', 'repeats'], 'out',
        )

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 56 Column: 22

                          'NumpyTile', ['input', 'repeats'], 'out',
        )

        def tile_ref(input, repeats):
            tiled_data = np.tile(input, repeats)
            return (tiled_data,)

        # Check against numpy reference
        self.assertReferenceChecks(gc, op, [input, repeats],

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              




import numpy as np

from hypothesis import given, settings
import hypothesis.strategies as st

            

Reported by Pylint.

standard import "import unittest" should be placed before "import numpy as np"
Error

Line: 10 Column: 1

              
from hypothesis import given, settings
import hypothesis.strategies as st
import unittest

from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial


            

Reported by Pylint.

torch/utils/benchmark/examples/spectral_ops_fuzz_test.py
19 issues
Module 'torch' has no 'dtype' member
Error

Line: 22 Column: 55

                  raise ValueError(f"Expected ndim in range 1-3, got {ndim}")


def run_benchmark(name: str, function: object, dtype: torch.dtype, seed: int, device: str, samples: int,
                  probability_regular: float):
    cuda = device == 'cuda'
    spectral_fuzzer = SpectralOpFuzzer(seed=seed, dtype=dtype, cuda=cuda,
                                       probability_regular=probability_regular)
    results = []

            

Reported by Pylint.

Module 'torch' has no 'float32' member
Error

Line: 55 Column: 43

              
Benchmark = namedtuple('Benchmark', ['name', 'function', 'dtype'])
BENCHMARKS = [
    Benchmark('fft_real', torch.fft.fftn, torch.float32),
    Benchmark('fft_complex', torch.fft.fftn, torch.complex64),
    Benchmark('ifft', torch.fft.ifftn, torch.complex64),
    Benchmark('rfft', torch.fft.rfftn, torch.float32),
    Benchmark('irfft', torch.fft.irfftn, torch.complex64),
]

            

Reported by Pylint.

Module 'torch' has no 'complex64' member
Error

Line: 56 Column: 46

              Benchmark = namedtuple('Benchmark', ['name', 'function', 'dtype'])
BENCHMARKS = [
    Benchmark('fft_real', torch.fft.fftn, torch.float32),
    Benchmark('fft_complex', torch.fft.fftn, torch.complex64),
    Benchmark('ifft', torch.fft.ifftn, torch.complex64),
    Benchmark('rfft', torch.fft.rfftn, torch.float32),
    Benchmark('irfft', torch.fft.irfftn, torch.complex64),
]
BENCHMARK_MAP = {b.name: b for b in BENCHMARKS}

            

Reported by Pylint.

Module 'torch' has no 'complex64' member
Error

Line: 57 Column: 40

              BENCHMARKS = [
    Benchmark('fft_real', torch.fft.fftn, torch.float32),
    Benchmark('fft_complex', torch.fft.fftn, torch.complex64),
    Benchmark('ifft', torch.fft.ifftn, torch.complex64),
    Benchmark('rfft', torch.fft.rfftn, torch.float32),
    Benchmark('irfft', torch.fft.irfftn, torch.complex64),
]
BENCHMARK_MAP = {b.name: b for b in BENCHMARKS}
BENCHMARK_NAMES = [b.name for b in BENCHMARKS]

            

Reported by Pylint.

Module 'torch' has no 'float32' member
Error

Line: 58 Column: 40

                  Benchmark('fft_real', torch.fft.fftn, torch.float32),
    Benchmark('fft_complex', torch.fft.fftn, torch.complex64),
    Benchmark('ifft', torch.fft.ifftn, torch.complex64),
    Benchmark('rfft', torch.fft.rfftn, torch.float32),
    Benchmark('irfft', torch.fft.irfftn, torch.complex64),
]
BENCHMARK_MAP = {b.name: b for b in BENCHMARKS}
BENCHMARK_NAMES = [b.name for b in BENCHMARKS]
DEVICE_NAMES = ['cpu', 'cuda']

            

Reported by Pylint.

Module 'torch' has no 'complex64' member
Error

Line: 59 Column: 42

                  Benchmark('fft_complex', torch.fft.fftn, torch.complex64),
    Benchmark('ifft', torch.fft.ifftn, torch.complex64),
    Benchmark('rfft', torch.fft.rfftn, torch.float32),
    Benchmark('irfft', torch.fft.irfftn, torch.complex64),
]
BENCHMARK_MAP = {b.name: b for b in BENCHMARKS}
BENCHMARK_NAMES = [b.name for b in BENCHMARKS]
DEVICE_NAMES = ['cpu', 'cuda']


            

Reported by Pylint.

Redefining name 'device' from outer scope (line 97)
Error

Line: 22 Column: 79

                  raise ValueError(f"Expected ndim in range 1-3, got {ndim}")


def run_benchmark(name: str, function: object, dtype: torch.dtype, seed: int, device: str, samples: int,
                  probability_regular: float):
    cuda = device == 'cuda'
    spectral_fuzzer = SpectralOpFuzzer(seed=seed, dtype=dtype, cuda=cuda,
                                       probability_regular=probability_regular)
    results = []

            

Reported by Pylint.

Redefining name 'results' from outer scope (line 96)
Error

Line: 27 Column: 5

                  cuda = device == 'cuda'
    spectral_fuzzer = SpectralOpFuzzer(seed=seed, dtype=dtype, cuda=cuda,
                                       probability_regular=probability_regular)
    results = []
    for tensors, tensor_params, params in spectral_fuzzer.take(samples):
        shape = [params['k0'], params['k1'], params['k2']][:params['ndim']]
        str_shape = ' x '.join(["{:<4}".format(s) for s in shape])
        sub_label = f"{str_shape} {'' if tensor_params['x']['is_contiguous'] else '(discontiguous)'}"
        for dim in _dim_options(params['ndim']):

            

Reported by Pylint.

Redefining name 'results' from outer scope (line 96)
Error

Line: 65 Column: 23

              BENCHMARK_NAMES = [b.name for b in BENCHMARKS]
DEVICE_NAMES = ['cpu', 'cuda']

def _output_csv(file, results):
    file.write('benchmark,device,num_threads,numel,shape,contiguous,dim,mean (us),median (us),iqr (us)\n')
    for measurement in results:
        metadata = measurement.metadata
        device, dim, shape, name, numel, contiguous = (
            metadata['device'], metadata['dim'], metadata['shape'],

            

Reported by Pylint.

Redefining name 'device' from outer scope (line 97)
Error

Line: 69 Column: 9

                  file.write('benchmark,device,num_threads,numel,shape,contiguous,dim,mean (us),median (us),iqr (us)\n')
    for measurement in results:
        metadata = measurement.metadata
        device, dim, shape, name, numel, contiguous = (
            metadata['device'], metadata['dim'], metadata['shape'],
            metadata['name'], metadata['numel'], metadata['is_contiguous'])

        if isinstance(dim, Iterable):
            dim_str = '-'.join(str(d) for d in dim)

            

Reported by Pylint.

torch/nn/modules/dropout.py
19 issues
Attempted relative import beyond top-level package
Error

Line: 1 Column: 1

              from .module import Module
from .. import functional as F

from torch import Tensor


class _DropoutNd(Module):
    __constants__ = ['p', 'inplace']
    p: float

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 2 Column: 1

              from .module import Module
from .. import functional as F

from torch import Tensor


class _DropoutNd(Module):
    __constants__ = ['p', 'inplace']
    p: float

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 57 Column: 23

                      detectors: https://arxiv.org/abs/1207.0580
    """

    def forward(self, input: Tensor) -> Tensor:
        return F.dropout(input, self.p, self.training, self.inplace)


class Dropout2d(_DropoutNd):
    r"""Randomly zero out entire channels (a channel is a 2D feature map,

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 99 Column: 23

                     https://arxiv.org/abs/1411.4280
    """

    def forward(self, input: Tensor) -> Tensor:
        return F.dropout2d(input, self.p, self.training, self.inplace)


class Dropout3d(_DropoutNd):
    r"""Randomly zero out entire channels (a channel is a 3D feature map,

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 141 Column: 23

                     https://arxiv.org/abs/1411.4280
    """

    def forward(self, input: Tensor) -> Tensor:
        return F.dropout3d(input, self.p, self.training, self.inplace)


class AlphaDropout(_DropoutNd):
    r"""Applies Alpha Dropout over the input.

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 183 Column: 23

                  .. _Self-Normalizing Neural Networks: https://arxiv.org/abs/1706.02515
    """

    def forward(self, input: Tensor) -> Tensor:
        return F.alpha_dropout(input, self.p, self.training)


class FeatureAlphaDropout(_DropoutNd):
    r"""Randomly masks out entire channels (a channel is a feature map,

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 232 Column: 23

                     https://arxiv.org/abs/1411.4280
    """

    def forward(self, input: Tensor) -> Tensor:
        return F.feature_alpha_dropout(input, self.p, self.training)

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from .module import Module
from .. import functional as F

from torch import Tensor


class _DropoutNd(Module):
    __constants__ = ['p', 'inplace']
    p: float

            

Reported by Pylint.

third party import "from torch import Tensor" should be placed before "from .module import Module"
Error

Line: 4 Column: 1

              from .module import Module
from .. import functional as F

from torch import Tensor


class _DropoutNd(Module):
    __constants__ = ['p', 'inplace']
    p: float

            

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 7 Column: 1

              from torch import Tensor


class _DropoutNd(Module):
    __constants__ = ['p', 'inplace']
    p: float
    inplace: bool

    def __init__(self, p: float = 0.5, inplace: bool = False) -> None:

            

Reported by Pylint.

torch/quantization/quantization_mappings.py
19 issues
Attempted relative import beyond top-level package
Error

Line: 18 Column: 1

              
from typing import Optional, Union, Dict, Set, Callable, Any

from .stubs import QuantStub, DeQuantStub
from .fake_quantize import (
    default_affine_fixed_qparams_fake_quant,
    default_symmetric_fixed_qparams_fake_quant,
)
from .utils import get_combined_dict

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 19 Column: 1

              from typing import Optional, Union, Dict, Set, Callable, Any

from .stubs import QuantStub, DeQuantStub
from .fake_quantize import (
    default_affine_fixed_qparams_fake_quant,
    default_symmetric_fixed_qparams_fake_quant,
)
from .utils import get_combined_dict


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 23 Column: 1

                  default_affine_fixed_qparams_fake_quant,
    default_symmetric_fixed_qparams_fake_quant,
)
from .utils import get_combined_dict

# Default map for swapping float module to reference quantized modules
DEFAULT_REFERENCE_STATIC_QUANT_MODULE_MAPPINGS : Dict[Callable, Any] = {
    nn.Conv1d: nnqr.Conv1d,
    nn.Conv2d: nnqr.Conv2d,

            

Reported by Pylint.

TODO: merge with default static mapping
Error

Line: 133 Column: 3

              }

# Default mapping from floating point function or torch ops to quantized ops
# TODO: merge with default static mapping
DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS : Dict[Union[Callable, str], Callable] = {
    F.elu: torch._ops.ops.quantized.elu,
    F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,

            

Reported by Pylint.

Access to a protected member _ops of a client class
Error

Line: 135 Column: 12

              # Default mapping from floating point function or torch ops to quantized ops
# TODO: merge with default static mapping
DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS : Dict[Union[Callable, str], Callable] = {
    F.elu: torch._ops.ops.quantized.elu,
    F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,
    F.leaky_relu: torch._ops.ops.quantized.leaky_relu,
}

            

Reported by Pylint.

Access to a protected member _ops of a client class
Error

Line: 136 Column: 18

              # TODO: merge with default static mapping
DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS : Dict[Union[Callable, str], Callable] = {
    F.elu: torch._ops.ops.quantized.elu,
    F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,
    F.leaky_relu: torch._ops.ops.quantized.leaky_relu,
}


            

Reported by Pylint.

Access to a protected member _ops of a client class
Error

Line: 137 Column: 22

              DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS : Dict[Union[Callable, str], Callable] = {
    F.elu: torch._ops.ops.quantized.elu,
    F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,
    F.leaky_relu: torch._ops.ops.quantized.leaky_relu,
}

# mapping from module to output activation post process class

            

Reported by Pylint.

Access to a protected member _ops of a client class
Error

Line: 138 Column: 19

                  F.elu: torch._ops.ops.quantized.elu,
    F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,
    F.leaky_relu: torch._ops.ops.quantized.leaky_relu,
}

# mapping from module to output activation post process class
DEFAULT_MODULE_TO_ACT_POST_PROCESS : Dict[Callable, Callable] = {

            

Reported by Pylint.

Access to a protected member _ops of a client class
Error

Line: 139 Column: 19

                  F.hardswish: torch._ops.ops.quantized.hardswish,
    F.instance_norm: torch._ops.ops.quantized.instance_norm,
    F.layer_norm: torch._ops.ops.quantized.layer_norm,
    F.leaky_relu: torch._ops.ops.quantized.leaky_relu,
}

# mapping from module to output activation post process class
DEFAULT_MODULE_TO_ACT_POST_PROCESS : Dict[Callable, Callable] = {
    nn.Hardsigmoid: default_affine_fixed_qparams_fake_quant,

            

Reported by Pylint.

TODO: merge with get_static_quant_module_class
Error

Line: 236 Column: 3

              ) -> Dict[Union[Callable, str], Callable]:
    return copy.deepcopy(DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS)

# TODO: merge with get_static_quant_module_class
def get_quantized_operator(float_op: Union[Callable, str]) -> Callable:
    ''' Get the quantized operator corresponding to the float operator
    '''
    quantized_op = DEFAULT_FLOAT_TO_QUANTIZED_OPERATOR_MAPPINGS.get(float_op, None)
    assert quantized_op is not None, \

            

Reported by Pylint.

tools/codegen/context.py
19 issues
Missing module docstring
Error

Line: 1 Column: 1

              from tools.codegen.utils import S, T, context
from tools.codegen.model import (NativeFunction, NativeFunctionsGroup, BackendIndex, DispatchKey)
import tools.codegen.local as local

import functools
from typing import TypeVar, Union, Iterator, Callable, Dict
import contextlib

# Helper functions for defining generators on things in the model

            

Reported by Pylint.

standard import "import functools" should be placed before "from tools.codegen.utils import S, T, context"
Error

Line: 5 Column: 1

              from tools.codegen.model import (NativeFunction, NativeFunctionsGroup, BackendIndex, DispatchKey)
import tools.codegen.local as local

import functools
from typing import TypeVar, Union, Iterator, Callable, Dict
import contextlib

# Helper functions for defining generators on things in the model


            

Reported by Pylint.

standard import "from typing import TypeVar, Union, Iterator, Callable, Dict" should be placed before "from tools.codegen.utils import S, T, context"
Error

Line: 6 Column: 1

              import tools.codegen.local as local

import functools
from typing import TypeVar, Union, Iterator, Callable, Dict
import contextlib

# Helper functions for defining generators on things in the model

F = TypeVar(

            

Reported by Pylint.

standard import "import contextlib" should be placed before "from tools.codegen.utils import S, T, context"
Error

Line: 7 Column: 1

              
import functools
from typing import TypeVar, Union, Iterator, Callable, Dict
import contextlib

# Helper functions for defining generators on things in the model

F = TypeVar(
    'F',

            

Reported by Pylint.

Class name "F" doesn't conform to PascalCase naming style
Error

Line: 11 Column: 1

              
# Helper functions for defining generators on things in the model

F = TypeVar(
    'F',
    NativeFunction,
    NativeFunctionsGroup,
    Union[NativeFunction, NativeFunctionsGroup],
)

            

Reported by Pylint.

Argument name "g" doesn't conform to snake_case naming style
Error

Line: 19 Column: 1

              )

@contextlib.contextmanager
def native_function_manager(g: Union[NativeFunctionsGroup, NativeFunction]) -> Iterator[None]:
    if isinstance(g, NativeFunctionsGroup):
        # By default, we associate all errors with structured native functions
        # with the out variant.  In some cases, it might be better to have
        # a more specific place to hang things; if so, use
        # native_function_manager again on the inside

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 19 Column: 1

              )

@contextlib.contextmanager
def native_function_manager(g: Union[NativeFunctionsGroup, NativeFunction]) -> Iterator[None]:
    if isinstance(g, NativeFunctionsGroup):
        # By default, we associate all errors with structured native functions
        # with the out variant.  In some cases, it might be better to have
        # a more specific place to hang things; if so, use
        # native_function_manager again on the inside

            

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 25 Column: 9

                      # with the out variant.  In some cases, it might be better to have
        # a more specific place to hang things; if so, use
        # native_function_manager again on the inside
        f = g.out
    else:
        f = g
    with context(lambda: f'in native_functions.yaml line {f.loc}:\n  {f.func}'):
        with local.parametrize(use_const_ref_for_mutable_tensors=f.use_const_ref_for_mutable_tensors):
            yield

            

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 27 Column: 9

                      # native_function_manager again on the inside
        f = g.out
    else:
        f = g
    with context(lambda: f'in native_functions.yaml line {f.loc}:\n  {f.func}'):
        with local.parametrize(use_const_ref_for_mutable_tensors=f.use_const_ref_for_mutable_tensors):
            yield

# Given a function that operates on NativeFunction, wrap it into a new function

            

Reported by Pylint.

Line too long (102/100)
Error

Line: 29 Column: 1

                  else:
        f = g
    with context(lambda: f'in native_functions.yaml line {f.loc}:\n  {f.func}'):
        with local.parametrize(use_const_ref_for_mutable_tensors=f.use_const_ref_for_mutable_tensors):
            yield

# Given a function that operates on NativeFunction, wrap it into a new function
# that sets some appropriate context managers for that native function.
# YOU MUST WRAP FUNCTIONS IN THIS for calls to api modules to be sound

            

Reported by Pylint.

torch/ao/sparsity/scheduler/base_scheduler.py
19 issues
Assigning result of a function call, where the function has no return
Error

Line: 142 Column: 13

              
        with _enable_get_sl_call(self):
            self.last_epoch += 1
            values = self.get_sl()

        for i, data in enumerate(zip(self.sparsifier.module_groups, values)):
            param_group, sl = data
            param_group['sparsity_level'] = sl
            self.print_sl(self.verbose, i, sl, epoch)

            

Reported by Pylint.

Access to a protected member _with_counter of a client class
Error

Line: 48 Column: 13

              
            # Note that the returned function here is no longer a bound method,
            # so attributes like `__func__` and `__self__` no longer exist.
            wrapper._with_counter = True  # type: ignore[attr-defined]
            return wrapper

        self.sparsifier.step = with_counter(self.sparsifier.step)  # type: ignore[assignment]
        self.sparsifier._step_count = 0  # type: ignore[attr-defined]
        self._step_count: int = 0

            

Reported by Pylint.

Access to a protected member _step_count of a client class
Error

Line: 122 Column: 18

                                            "`scheduler.step()`.", UserWarning)

            # Just check if there were two first scheduler.step() calls before sparsifier.step()
            elif self.sparsifier._step_count < 1:  # type: ignore[attr-defined]
                warnings.warn("Detected call of `scheduler.step()` before `sparsifier.step()`. "
                              "You have to make sure you run the sparsifier.step() BEFORE any "
                              "calls to the scheduer.step().", UserWarning)
        self._step_count += 1


            

Reported by Pylint.

Redefining built-in 'type'
Error

Line: 137 Column: 32

                              self.o._get_sl_called_within_step = True
                return self

            def __exit__(self, type, value, traceback):
                self.o._get_sl_called_within_step = False

        with _enable_get_sl_call(self):
            self.last_epoch += 1
            values = self.get_sl()

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              
from torch.ao.sparsity import BaseSparsifier

from functools import wraps
import warnings
import weakref

class BaseScheduler(object):


            

Reported by Pylint.

standard import "from functools import wraps" should be placed before "from torch.ao.sparsity import BaseSparsifier"
Error

Line: 4 Column: 1

              
from torch.ao.sparsity import BaseSparsifier

from functools import wraps
import warnings
import weakref

class BaseScheduler(object):


            

Reported by Pylint.

standard import "import warnings" should be placed before "from torch.ao.sparsity import BaseSparsifier"
Error

Line: 5 Column: 1

              from torch.ao.sparsity import BaseSparsifier

from functools import wraps
import warnings
import weakref

class BaseScheduler(object):

    def __init__(self, sparsifier, last_epoch=-1, verbose=False):

            

Reported by Pylint.

standard import "import weakref" should be placed before "from torch.ao.sparsity import BaseSparsifier"
Error

Line: 6 Column: 1

              
from functools import wraps
import warnings
import weakref

class BaseScheduler(object):

    def __init__(self, sparsifier, last_epoch=-1, verbose=False):


            

Reported by Pylint.

Class 'BaseScheduler' inherits from object, can be safely removed from bases in python3
Error

Line: 8 Column: 1

              import warnings
import weakref

class BaseScheduler(object):

    def __init__(self, sparsifier, last_epoch=-1, verbose=False):

        # Attach sparsifier
        if not isinstance(sparsifier, BaseSparsifier):

            

Reported by Pylint.

Missing class docstring
Error

Line: 8 Column: 1

              import warnings
import weakref

class BaseScheduler(object):

    def __init__(self, sparsifier, last_epoch=-1, verbose=False):

        # Attach sparsifier
        if not isinstance(sparsifier, BaseSparsifier):

            

Reported by Pylint.

torch/cuda/random.py
19 issues
Attempted relative import beyond top-level package
Error

Line: 3 Column: 1

              import torch
from typing import cast, Iterable, List, Union
from . import _lazy_init, _lazy_call, device_count, current_device
from .. import Tensor

__all__ = ['get_rng_state', 'get_rng_state_all',
           'set_rng_state', 'set_rng_state_all',
           'manual_seed', 'manual_seed_all',
           'seed', 'seed_all', 'initial_seed']

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 4 Column: 1

              import torch
from typing import cast, Iterable, List, Union
from . import _lazy_init, _lazy_call, device_count, current_device
from .. import Tensor

__all__ = ['get_rng_state', 'get_rng_state_all',
           'set_rng_state', 'set_rng_state_all',
           'manual_seed', 'manual_seed_all',
           'seed', 'seed_all', 'initial_seed']

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 12 Column: 43

                         'seed', 'seed_all', 'initial_seed']


def get_rng_state(device: Union[int, str, torch.device] = 'cuda') -> Tensor:
    r"""Returns the random number generator state of the specified GPU as a ByteTensor.

    Args:
        device (torch.device or int, optional): The device to return the RNG state of.
            Default: ``'cuda'`` (i.e., ``torch.device('cuda')``, the current CUDA device).

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 24 Column: 18

                  """
    _lazy_init()
    if isinstance(device, str):
        device = torch.device(device)
    elif isinstance(device, int):
        device = torch.device('cuda', device)
    idx = device.index
    if idx is None:
        idx = current_device()

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 26 Column: 18

                  if isinstance(device, str):
        device = torch.device(device)
    elif isinstance(device, int):
        device = torch.device('cuda', device)
    idx = device.index
    if idx is None:
        idx = current_device()
    default_generator = torch.cuda.default_generators[idx]
    return default_generator.get_state()

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 43 Column: 62

                  return results


def set_rng_state(new_state: Tensor, device: Union[int, str, torch.device] = 'cuda') -> None:
    r"""Sets the random number generator state of the specified GPU.

    Args:
        new_state (torch.ByteTensor): The desired state
        device (torch.device or int, optional): The device to set the RNG state.

            

Reported by Pylint.

Module 'torch' has no 'contiguous_format' member
Error

Line: 51 Column: 52

                      device (torch.device or int, optional): The device to set the RNG state.
            Default: ``'cuda'`` (i.e., ``torch.device('cuda')``, the current CUDA device).
    """
    new_state_copy = new_state.clone(memory_format=torch.contiguous_format)
    if isinstance(device, str):
        device = torch.device(device)
    elif isinstance(device, int):
        device = torch.device('cuda', device)


            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 53 Column: 18

                  """
    new_state_copy = new_state.clone(memory_format=torch.contiguous_format)
    if isinstance(device, str):
        device = torch.device(device)
    elif isinstance(device, int):
        device = torch.device('cuda', device)

    def cb():
        idx = cast(torch.device, device).index

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 55 Column: 18

                  if isinstance(device, str):
        device = torch.device(device)
    elif isinstance(device, int):
        device = torch.device('cuda', device)

    def cb():
        idx = cast(torch.device, device).index
        if idx is None:
            idx = current_device()

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 58 Column: 20

                      device = torch.device('cuda', device)

    def cb():
        idx = cast(torch.device, device).index
        if idx is None:
            idx = current_device()
        default_generator = torch.cuda.default_generators[idx]
        default_generator.set_state(new_state_copy)


            

Reported by Pylint.

tools/codegen/utils.py
19 issues
TODO: Use a real parser here; this will get bamboozled
Error

Line: 45 Column: 3

              # occurrence of a parameter in the derivative formula
IDENT_REGEX = r'(^|\W){}($|\W)'

# TODO: Use a real parser here; this will get bamboozled
def split_name_params(schema: str) -> Tuple[str, List[str]]:
    m = re.match(r'(\w+)(\.\w+)?\((.*)\)', schema)
    if m is None:
        raise RuntimeError(f'Unsupported function schema: {schema}')
    name, _, params = m.groups()

            

Reported by Pylint.

TODO: this does the wrong thing with KeyError
Error

Line: 80 Column: 3

                  try:
        yield
    except Exception as e:
        # TODO: this does the wrong thing with KeyError
        msg = msg_fn()
        msg = textwrap.indent(msg, '  ')
        msg = f'{e.args[0]}\n{msg}' if e.args else msg
        e.args = (msg,) + e.args[1:]
        raise

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import re
from typing import Tuple, List, Iterable, Iterator, Callable, Sequence, TypeVar, Optional
from enum import Enum
import contextlib
import textwrap

# Safely load fast C Yaml loader/dumper if they are available
try:
    from yaml import CSafeLoader as Loader

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 46 Column: 1

              IDENT_REGEX = r'(^|\W){}($|\W)'

# TODO: Use a real parser here; this will get bamboozled
def split_name_params(schema: str) -> Tuple[str, List[str]]:
    m = re.match(r'(\w+)(\.\w+)?\((.*)\)', schema)
    if m is None:
        raise RuntimeError(f'Unsupported function schema: {schema}')
    name, _, params = m.groups()
    return name, params.split(', ')

            

Reported by Pylint.

Variable name "m" doesn't conform to snake_case naming style
Error

Line: 47 Column: 5

              
# TODO: Use a real parser here; this will get bamboozled
def split_name_params(schema: str) -> Tuple[str, List[str]]:
    m = re.match(r'(\w+)(\.\w+)?\((.*)\)', schema)
    if m is None:
        raise RuntimeError(f'Unsupported function schema: {schema}')
    name, _, params = m.groups()
    return name, params.split(', ')


            

Reported by Pylint.

Class name "T" doesn't conform to PascalCase naming style
Error

Line: 53 Column: 1

                  name, _, params = m.groups()
    return name, params.split(', ')

T = TypeVar('T')
S = TypeVar('S')

# These two functions purposely return generators in analogy to map()
# so that you don't mix up when you need to list() them


            

Reported by Pylint.

Class name "S" doesn't conform to PascalCase naming style
Error

Line: 54 Column: 1

                  return name, params.split(', ')

T = TypeVar('T')
S = TypeVar('S')

# These two functions purposely return generators in analogy to map()
# so that you don't mix up when you need to list() them

# Map over function that may return None; omit Nones from output sequence

            

Reported by Pylint.

Argument name "xs" doesn't conform to snake_case naming style
Error

Line: 60 Column: 1

              # so that you don't mix up when you need to list() them

# Map over function that may return None; omit Nones from output sequence
def mapMaybe(func: Callable[[T], Optional[S]], xs: Iterable[T]) -> Iterator[S]:
    for x in xs:
        r = func(x)
        if r is not None:
            yield r


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 60 Column: 1

              # so that you don't mix up when you need to list() them

# Map over function that may return None; omit Nones from output sequence
def mapMaybe(func: Callable[[T], Optional[S]], xs: Iterable[T]) -> Iterator[S]:
    for x in xs:
        r = func(x)
        if r is not None:
            yield r


            

Reported by Pylint.

Function name "mapMaybe" doesn't conform to snake_case naming style
Error

Line: 60 Column: 1

              # so that you don't mix up when you need to list() them

# Map over function that may return None; omit Nones from output sequence
def mapMaybe(func: Callable[[T], Optional[S]], xs: Iterable[T]) -> Iterator[S]:
    for x in xs:
        r = func(x)
        if r is not None:
            yield r


            

Reported by Pylint.

tools/codegen/selective_build/operator.py
19 issues
Lambda may not be necessary
Error

Line: 75 Column: 36

                      if 'debug_info' in op_info:
            di_list = op_info['debug_info']
            assert isinstance(di_list, list)
            debug_info = tuple(map(lambda x: str(x), di_list))

        return SelectiveBuildOperator(
            name=op_name,
            is_root_operator=is_root_operator,
            is_used_for_training=is_used_for_training,

            

Reported by Pylint.

Access to a protected member _debug_info of a client class
Error

Line: 140 Column: 38

                      # in this instance of the pytorch library.
        is_used_for_training=lhs.is_used_for_training or rhs.is_used_for_training,
        include_all_overloads=lhs.include_all_overloads or rhs.include_all_overloads,
        _debug_info=merge_debug_info(lhs._debug_info, rhs._debug_info),
    )

def merge_operator_dicts(
        lhs: Dict[str, SelectiveBuildOperator],
        rhs: Dict[str, SelectiveBuildOperator],

            

Reported by Pylint.

Access to a protected member _debug_info of a client class
Error

Line: 140 Column: 55

                      # in this instance of the pytorch library.
        is_used_for_training=lhs.is_used_for_training or rhs.is_used_for_training,
        include_all_overloads=lhs.include_all_overloads or rhs.include_all_overloads,
        _debug_info=merge_debug_info(lhs._debug_info, rhs._debug_info),
    )

def merge_operator_dicts(
        lhs: Dict[str, SelectiveBuildOperator],
        rhs: Dict[str, SelectiveBuildOperator],

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from typing import Dict, Optional, Tuple
from dataclasses import dataclass

# This class holds information about a single operator used to determine
# the outcome of a selective/custom PyTorch build that doesn't include
# registration code for all the supported operators. This is done to
# reduce the size of the generated binary so that it can be deployed in
# situations where binary size comes at a premium.
#

            

Reported by Pylint.

Missing class docstring
Error

Line: 11 Column: 1

              # situations where binary size comes at a premium.
#
@dataclass(frozen=True)
class SelectiveBuildOperator():
    # The name of the operator. This includes the aten::, etc... prefix
    # The operator name may or may not have the overload name. If this
    # operator name does not specify an overload name, the way to determine
    # if this entry refers to the family of operators with this base name
    # or just the operator with this name is to look at the value of the

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 51 Column: 5

                  _debug_info: Optional[Tuple[str, ...]]

    @staticmethod
    def from_yaml_dict(op_name: str, op_info: Dict[str, object]) -> 'SelectiveBuildOperator':
        allowed_keys = {'name', 'is_root_operator', 'is_used_for_training', 'include_all_overloads', 'debug_info'}

        if len(set(op_info.keys()) - allowed_keys) > 0:
            raise Exception("Got unexpected top level keys: {}".format(
                ",".join(set(op_info.keys()) - allowed_keys),

            

Reported by Pylint.

Line too long (114/100)
Error

Line: 52 Column: 1

              
    @staticmethod
    def from_yaml_dict(op_name: str, op_info: Dict[str, object]) -> 'SelectiveBuildOperator':
        allowed_keys = {'name', 'is_root_operator', 'is_used_for_training', 'include_all_overloads', 'debug_info'}

        if len(set(op_info.keys()) - allowed_keys) > 0:
            raise Exception("Got unexpected top level keys: {}".format(
                ",".join(set(op_info.keys()) - allowed_keys),
            ))

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 60
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

                          ))

        if 'name' in op_info:
            assert op_name == op_info['name']

        is_root_operator = op_info.get('is_root_operator', True)
        assert isinstance(is_root_operator, bool)

        is_used_for_training = op_info.get('is_used_for_training', True)

            

Reported by Bandit.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 63
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

                          assert op_name == op_info['name']

        is_root_operator = op_info.get('is_root_operator', True)
        assert isinstance(is_root_operator, bool)

        is_used_for_training = op_info.get('is_used_for_training', True)
        assert isinstance(is_used_for_training, bool)

        include_all_overloads = op_info.get('include_all_overloads', True)

            

Reported by Bandit.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 66
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

                      assert isinstance(is_root_operator, bool)

        is_used_for_training = op_info.get('is_used_for_training', True)
        assert isinstance(is_used_for_training, bool)

        include_all_overloads = op_info.get('include_all_overloads', True)
        assert isinstance(include_all_overloads, bool)

        debug_info: Optional[Tuple[str, ...]] = None

            

Reported by Bandit.