The following issues were found:

caffe2/quantization/server/elementwise_sum_dnnlowp_op_test.py
45 issues
Unable to import 'hypothesis.strategies'
Error

Line: 6 Column: 1

              import collections

import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close
from hypothesis import given


            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 10 Column: 1

              import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close
from hypothesis import given


dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops")
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])


            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 25 Column: 68

                      is_empty=st.booleans(),
        **hu.gcs_cpu_only
    )
    def test_dnnlowp_elementwise_sum_int(self, N, M, is_empty, gc, dc):
        if is_empty:
            N = 0
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]
        X_names = M * [None]

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 88 Column: 66

              
    # correctness test with no quantization error in inputs
    @given(N=st.integers(32, 256), M=st.integers(1, 3), **hu.gcs_cpu_only)
    def test_dnnlowp_elementwise_sum_int_inplace(self, N, M, gc, dc):
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]
        X_names = M * [None]
        X_q_names = M * [None]


            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 154 Column: 63

              
    # correctness test with no quantization error in inputs
    @given(N=st.integers(32, 256), M=st.integers(1, 3), **hu.gcs_cpu_only)
    def test_dnnlowp_elementwise_sum_relu_int(self, N, M, gc, dc):
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]
        X_names = M * [None]
        X_q_names = M * [None]


            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 218 Column: 71

              
    # correctness test with no quantization error in inputs
    @given(N=st.integers(32, 256), M=st.integers(1, 3), **hu.gcs_cpu_only)
    def test_dnnlowp_elementwise_sum_relu_int_inplace(self, N, M, gc, dc):
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]
        X_names = M * [None]
        X_q_names = M * [None]


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              

import collections

import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close

            

Reported by Pylint.

Missing class docstring
Error

Line: 17 Column: 1

              workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])


class DNNLowPOpSumOpTest(hu.HypothesisTestCase):
    # correctness test with no quantization error in inputs
    @given(
        N=st.integers(32, 256),
        M=st.integers(1, 3),
        is_empty=st.booleans(),

            

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Error

Line: 24 Column: 5

                      M=st.integers(1, 3),
        is_empty=st.booleans(),
        **hu.gcs_cpu_only
    )
    def test_dnnlowp_elementwise_sum_int(self, N, M, is_empty, gc, dc):
        if is_empty:
            N = 0
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]

            

Reported by Pylint.

Too many arguments (6/5)
Error

Line: 24 Column: 5

                      M=st.integers(1, 3),
        is_empty=st.booleans(),
        **hu.gcs_cpu_only
    )
    def test_dnnlowp_elementwise_sum_int(self, N, M, is_empty, gc, dc):
        if is_empty:
            N = 0
        # All inputs have scale 1, so exactly represented after quantization
        inputs = M * [None]

            

Reported by Pylint.

caffe2/python/optimizer_test_util.py
45 issues
Instance of 'OptimizerTestBase' has no 'assertIsInstance' member
Error

Line: 47 Column: 9

                      sq = model.SquaredL2Distance([out, 'label'])
        loss = model.AveragedLoss(sq, "avg_loss")
        grad_map = model.AddGradientOperators([loss])
        self.assertIsInstance(grad_map['fc_w'], core.BlobReference)
        return (model, perfect_model, data, label)

    def testDense(self):
        model, perfect_model, data, label = self._createDense()
        optimizer = self.build_optimizer(model)

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'build_optimizer' member
Error

Line: 52 Column: 21

              
    def testDense(self):
        model, perfect_model, data, label = self._createDense()
        optimizer = self.build_optimizer(model)
        workspace.FeedBlob('data', data[0])
        workspace.FeedBlob('label', label[0])
        workspace.RunNetOnce(model.param_init_net)
        workspace.CreateNet(model.net, True)
        for _ in range(2000):

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'check_optimizer' member
Error

Line: 68 Column: 9

                          workspace.FetchBlob('fc_w'),
            atol=1e-2
        )
        self.check_optimizer(optimizer)

    @unittest.skipIf(not workspace.has_gpu_support, "No gpu support")
    def testGPUDense(self, dtype=core.DataType.FLOAT):
        device_opt = core.DeviceOption(workspace.GpuDeviceType, 0)
        with core.DeviceScope(device_opt):

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'build_optimizer' member
Error

Line: 87 Column: 9

                      brew.fc(model, 'fc_cpu', 'fc2', dim_in=1, dim_out=10, axis=0)

        # Create optimizer in default device scope
        self.build_optimizer(model)

        if self._skip_gpu:
            return

        # Run net to see it does not crash

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no '_skip_gpu' member
Error

Line: 89 Column: 12

                      # Create optimizer in default device scope
        self.build_optimizer(model)

        if self._skip_gpu:
            return

        # Run net to see it does not crash
        workspace.RunNetOnce(model.param_init_net)
        workspace.CreateNet(model.net, True)

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'assertIsInstance' member
Error

Line: 119 Column: 9

                      sq = model.SquaredL2Distance([out, 'label'])
        loss = model.AveragedLoss(sq, "avg_loss")
        grad_map = model.AddGradientOperators([loss])
        self.assertIsInstance(grad_map['w'], core.GradientSlice)
        optimizer = self.build_optimizer(model)

        workspace.CreateBlob('indices')
        workspace.CreateBlob('label')


            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'build_optimizer' member
Error

Line: 120 Column: 21

                      loss = model.AveragedLoss(sq, "avg_loss")
        grad_map = model.AddGradientOperators([loss])
        self.assertIsInstance(grad_map['w'], core.GradientSlice)
        optimizer = self.build_optimizer(model)

        workspace.CreateBlob('indices')
        workspace.CreateBlob('label')

        for indices_type in [np.int32, np.int64]:

            

Reported by Pylint.

Instance of 'OptimizerTestBase' has no 'check_optimizer' member
Error

Line: 148 Column: 9

                              workspace.FetchBlob('w'),
                atol=1e-2
            )
        self.check_optimizer(optimizer)


class LRModificationTestBase(object):
    """
    This is an abstract base class.

            

Reported by Pylint.

Instance of 'LRModificationTestBase' has no '_createDense' member
Error

Line: 178 Column: 45

              
    def test_global_norm_based_gradient_clipping(self):
        max_gradient_norm = 1.0
        model, perfect_model, data, label = self._createDense()
        opt = self.build_optimizer(model, max_gradient_norm=max_gradient_norm)

        params = []
        for param in model.GetParams(top_scope=True):
            if param in model.param_to_grad:

            

Reported by Pylint.

Instance of 'LRModificationTestBase' has no 'build_optimizer' member
Error

Line: 179 Column: 15

                  def test_global_norm_based_gradient_clipping(self):
        max_gradient_norm = 1.0
        model, perfect_model, data, label = self._createDense()
        opt = self.build_optimizer(model, max_gradient_norm=max_gradient_norm)

        params = []
        for param in model.GetParams(top_scope=True):
            if param in model.param_to_grad:
                if not isinstance(

            

Reported by Pylint.

caffe2/python/operator_test/adagrad_test.py
45 issues
Unable to import 'hypothesis.strategies'
Error

Line: 5 Column: 1

              
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core
from caffe2.python.operator_test.adagrad_test_helper import (
    adagrad_sparse_test_helper,
    ref_adagrad,

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 12 Column: 1

                  adagrad_sparse_test_helper,
    ref_adagrad,
)
from hypothesis import HealthCheck, given, settings


class TestAdagrad(serial.SerializedTestCase):
    @given(
        inputs=hu.tensors(n=3),

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 28 Column: 67

                      **hu.gcs
    )
    @settings(deadline=10000)
    def test_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 62 Column: 54

                  )
    @settings(deadline=10000)
    def test_adagrad_output_effective_lr(
        self, inputs, lr, epsilon, weight_decay, gc, dc
    ):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)


            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 100 Column: 84

                      **hu.gcs_cpu_only
    )
    @settings(deadline=10000)
    def test_adagrad_output_effective_lr_and_update(self, inputs, lr, epsilon, gc, dc):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)

        op = core.CreateOperator(

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import functools

import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core
from caffe2.python.operator_test.adagrad_test_helper import (
    adagrad_sparse_test_helper,

            

Reported by Pylint.

Missing class docstring
Error

Line: 15 Column: 1

              from hypothesis import HealthCheck, given, settings


class TestAdagrad(serial.SerializedTestCase):
    @given(
        inputs=hu.tensors(n=3),
        lr=st.floats(
            min_value=0.01, max_value=0.99, allow_nan=False, allow_infinity=False
        ),

            

Reported by Pylint.

Too many arguments (7/5)
Error

Line: 27 Column: 5

                      weight_decay=st.sampled_from([0.0, 0.1]),
        **hu.gcs
    )
    @settings(deadline=10000)
    def test_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)


            

Reported by Pylint.

Argument name "gc" doesn't conform to snake_case naming style
Error

Line: 27 Column: 5

                      weight_decay=st.sampled_from([0.0, 0.1]),
        **hu.gcs
    )
    @settings(deadline=10000)
    def test_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 27 Column: 5

                      weight_decay=st.sampled_from([0.0, 0.1]),
        **hu.gcs
    )
    @settings(deadline=10000)
    def test_adagrad(self, inputs, lr, epsilon, weight_decay, gc, dc):
        param, momentum, grad = inputs
        momentum = np.abs(momentum)
        lr = np.array([lr], dtype=np.float32)


            

Reported by Pylint.

caffe2/python/operator_test/affine_channel_op_test.py
45 issues
Unable to import 'hypothesis'
Error

Line: 8 Column: 1

              from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestAffineChannelOp(serial.SerializedTestCase):

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 9 Column: 1

              import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              



from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st

            

Reported by Pylint.

Missing class docstring
Error

Line: 13 Column: 1

              import numpy as np


class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 14 Column: 5

              

class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)

            

Reported by Pylint.

Method could be a function
Error

Line: 14 Column: 5

              

class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)

            

Reported by Pylint.

Argument name "X" doesn't conform to snake_case naming style
Error

Line: 14 Column: 5

              

class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)

            

Reported by Pylint.

Variable name "N" doesn't conform to snake_case naming style
Error

Line: 16 Column: 9

              class TestAffineChannelOp(serial.SerializedTestCase):
    def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)
        bias = bias.reshape(C, 1)
        Y = X * scale + bias

            

Reported by Pylint.

Variable name "C" doesn't conform to snake_case naming style
Error

Line: 17 Column: 9

                  def affine_channel_nchw_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]
        C = dims[1]
        X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)
        bias = bias.reshape(C, 1)
        Y = X * scale + bias
        return [Y.reshape(dims)]

            

Reported by Pylint.

Variable name "Y" doesn't conform to snake_case naming style
Error

Line: 21 Column: 9

                      X = X.reshape(N, C, -1)
        scale = scale.reshape(C, 1)
        bias = bias.reshape(C, 1)
        Y = X * scale + bias
        return [Y.reshape(dims)]

    def affine_channel_nhwc_ref(self, X, scale, bias):
        dims = X.shape
        N = dims[0]

            

Reported by Pylint.

caffe2/python/lstm_benchmark.py
44 issues
Use lazy % formatting in logging functions
Error

Line: 27 Column: 14

                  '''
    Fill a queue with input data
    '''
    log.info("Generating T={} sequence batches".format(T))

    generate_input_init_net = core.Net('generate_input_init')
    queue = generate_input_init_net.CreateBlobsQueue(
        [], "inputqueue", num_blobs=1, capacity=T,
    )

            

Reported by Pylint.

Redefining name 'args' from outer scope (line 332)
Error

Line: 67 Column: 18

                  return queue, label_queue, entry_counts


def create_model(args, queue, label_queue, input_shape):
    model = model_helper.ModelHelper(name="LSTM_bench")
    seq_lengths, target = \
        model.net.AddExternalInputs(
            'seq_lengths',
            'target',

            

Reported by Pylint.

Unused variable 'target'
Error

Line: 69 Column: 18

              
def create_model(args, queue, label_queue, input_shape):
    model = model_helper.ModelHelper(name="LSTM_bench")
    seq_lengths, target = \
        model.net.AddExternalInputs(
            'seq_lengths',
            'target',
        )


            

Reported by Pylint.

Unused variable 'last_state'
Error

Line: 94 Column: 33

                          )
            init_blobs.extend([hidden_init, cell_init])

        output, last_hidden, _, last_state = rnn_cell.LSTM(
            model=model,
            input_blob=input_blob,
            seq_lengths=seq_lengths,
            initial_states=init_blobs,
            dim_in=args.input_dim,

            

Reported by Pylint.

Possible unbalanced tuple unpacking with sequence defined at line 1838 of caffe2.python.rnn_cell: left side has 3 label(s), right side has 4 value(s)
Error

Line: 119 Column: 9

                      # can infer the dimensions.
        init_blobs = model.net.AddExternalInputs("hidden_init", "cell_init")
        model.param_init_net.ConstantFill([], input_blob, shape=input_shape)
        output, last_hidden, _ = rnn_cell.cudnn_LSTM(
            model=model,
            input_blob=input_blob,
            initial_states=init_blobs,
            dim_in=args.input_dim,
            dim_out=args.hidden_dim,

            

Reported by Pylint.

Unused variable 'softmax'
Error

Line: 133 Column: 5

                      assert False, "Unknown implementation"

    weights = model.net.UniformFill(labels, "weights")
    softmax, loss = model.net.SoftmaxWithLoss(
        [model.Flatten(output), labels, weights],
        ['softmax', 'loss'],
    )

    if not args.forward_only:

            

Reported by Pylint.

Redefining name 'args' from outer scope (line 332)
Error

Line: 163 Column: 16

                  return model, output


def Caffe2LSTM(args):
    T = args.data_size // args.batch_size

    input_blob_shape = [args.seq_length, args.batch_size, args.input_dim]
    queue, label_queue, entry_counts = generate_data(T // args.seq_length,
                                       input_blob_shape,

            

Reported by Pylint.

Unused variable 'output'
Error

Line: 177 Column: 12

                      np.array([args.seq_length] * args.batch_size, dtype=np.int32)
    )

    model, output = create_model(args, queue, label_queue, input_blob_shape)

    workspace.RunNetOnce(model.param_init_net)
    workspace.CreateNet(model.net)

    start_time = time.time()

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 193 Column: 18

                  if (args.gpu):
        log.info("Memory stats:")
        stats = utils.GetGPUMemoryUsageStats()
        log.info("GPU memory:\t{} MB".format(stats['max_total'] / 1024 / 1024))

    log.info("------ Starting benchmark ------")
    start_time = time.time()
    last_time = time.time()
    for iteration in range(1, num_iters, args.iters_to_report):

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 205 Column: 13

              
        new_time = time.time()
        log.info(
            "Iter: {} / {}. Entries Per Second: {}k.".format(
                iteration,
                num_iters,
                np.sum(entry_counts[iteration:iteration + iters_once]) /
                (new_time - last_time) // 100 / 10,
            )

            

Reported by Pylint.

torch/utils/data/dataset.py
44 issues
No name 'randperm' in module 'torch'
Error

Line: 18 Column: 1

              )

# No 'default_generator' in torch/__init__.pyi
from torch import default_generator, randperm
from torch._utils import _accumulate
from torch.utils.data._typing import _DataPipeMeta

from ... import Generator, Tensor


            

Reported by Pylint.

No name 'default_generator' in module 'torch'
Error

Line: 18 Column: 1

              )

# No 'default_generator' in torch/__init__.pyi
from torch import default_generator, randperm
from torch._utils import _accumulate
from torch.utils.data._typing import _DataPipeMeta

from ... import Generator, Tensor


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 22 Column: 1

              from torch._utils import _accumulate
from torch.utils.data._typing import _DataPipeMeta

from ... import Generator, Tensor

T_co = TypeVar('T_co', covariant=True)
T = TypeVar('T')



            

Reported by Pylint.

Invalid metaclass '_DataPipeMeta' used
Error

Line: 95 Column: 1

                      cls.functions[function_name] = function


class IterableDataset(Dataset[T_co], metaclass=_DataPipeMeta):
    r"""An iterable Dataset.

    All datasets that represent an iterable of data samples should subclass it.
    Such form of datasets is particularly useful when data come from a stream.


            

Reported by Pylint.

IterableDataset.reduce_ex_hook is not callable
Error

Line: 219 Column: 24

                  def __reduce_ex__(self, *args, **kwargs):
        if IterableDataset.reduce_ex_hook is not None:
            try:
                return IterableDataset.reduce_ex_hook(self)
            except NotImplementedError:
                pass
        return super().__reduce_ex__(*args, **kwargs)

    @classmethod

            

Reported by Pylint.

Method '__getitem__' is abstract in class 'Dataset' but is not overridden
Error

Line: 303 Column: 1

                      return self.cumulative_sizes


class ChainDataset(IterableDataset):
    r"""Dataset for chaining multiple :class:`IterableDataset` s.

    This class is useful to assemble different existing dataset streams. The
    chaining operation is done on-the-fly, so concatenating large-scale
    datasets with this class will be efficient.

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import bisect
import functools
import warnings
from typing import (
    Callable,
    Dict,
    Generic,
    Iterable,
    Iterator,

            

Reported by Pylint.

Class name "T_co" doesn't conform to PascalCase naming style
Error

Line: 24 Column: 1

              
from ... import Generator, Tensor

T_co = TypeVar('T_co', covariant=True)
T = TypeVar('T')


class DataChunk(list, Generic[T]):
    def __init__(self, items):

            

Reported by Pylint.

Class name "T" doesn't conform to PascalCase naming style
Error

Line: 25 Column: 1

              from ... import Generator, Tensor

T_co = TypeVar('T_co', covariant=True)
T = TypeVar('T')


class DataChunk(list, Generic[T]):
    def __init__(self, items):
        super().__init__(items)

            

Reported by Pylint.

Missing class docstring
Error

Line: 28 Column: 1

              T = TypeVar('T')


class DataChunk(list, Generic[T]):
    def __init__(self, items):
        super().__init__(items)
        self.items = items

    def as_str(self, indent=''):

            

Reported by Pylint.

test/package/test_misc.py
44 issues
Unable to import 'torch.package'
Error

Line: 6 Column: 1

              from io import BytesIO
from textwrap import dedent

from torch.package import PackageExporter, PackageImporter, is_from_package
from torch.package.package_exporter import PackagingError
from torch.testing._internal.common_utils import run_tests

try:
    from .common import PackageTestCase

            

Reported by Pylint.

Unable to import 'torch.package.package_exporter'
Error

Line: 7 Column: 1

              from textwrap import dedent

from torch.package import PackageExporter, PackageImporter, is_from_package
from torch.package.package_exporter import PackagingError
from torch.testing._internal.common_utils import run_tests

try:
    from .common import PackageTestCase
except ImportError:

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 8 Column: 1

              
from torch.package import PackageExporter, PackageImporter, is_from_package
from torch.package.package_exporter import PackagingError
from torch.testing._internal.common_utils import run_tests

try:
    from .common import PackageTestCase
except ImportError:
    # Support the case where we run this file directly.

            

Reported by Pylint.

Unable to import 'module_a'
Error

Line: 68 Column: 13

                      )

        with PackageExporter(buffer) as he:
            import module_a
            import package_a
            import package_a.subpackage

            obj = package_a.subpackage.PackageASubpackageObject()
            he.intern("**")

            

Reported by Pylint.

Unable to import 'package_a'
Error

Line: 69 Column: 13

              
        with PackageExporter(buffer) as he:
            import module_a
            import package_a
            import package_a.subpackage

            obj = package_a.subpackage.PackageASubpackageObject()
            he.intern("**")
            he.save_module(module_a.__name__)

            

Reported by Pylint.

Unable to import 'package_a.subpackage'
Error

Line: 70 Column: 13

                      with PackageExporter(buffer) as he:
            import module_a
            import package_a
            import package_a.subpackage

            obj = package_a.subpackage.PackageASubpackageObject()
            he.intern("**")
            he.save_module(module_a.__name__)
            he.save_module(package_a.__name__)

            

Reported by Pylint.

Unable to import 'package_a.subpackage'
Error

Line: 106 Column: 13

                      """
        buffer = BytesIO()
        with PackageExporter(buffer) as he:
            import package_a.subpackage

            he.intern("**")
            obj = package_a.subpackage.PackageASubpackageObject()
            he.save_pickle("obj", "obj.pkl", obj)


            

Reported by Pylint.

Unable to import 'package_b'
Error

Line: 125 Column: 13

                      """

        with PackageExporter(BytesIO()) as he:
            import package_b

            he.extern("package_b.subpackage_1")
            he.mock("package_b.subpackage_2")
            he.intern("**")
            he.save_pickle("obj", "obj.pkl", package_b.PackageBObject(["a"]))

            

Reported by Pylint.

Unable to import 'package_b'
Error

Line: 141 Column: 17

              
        with self.assertRaises(PackagingError) as e:
            with PackageExporter(BytesIO()) as he:
                import package_b

                he.deny("package_b")
                he.save_pickle("obj", "obj.pkl", package_b.PackageBObject(["a"]))
                self.assertEqual(he.denied_modules(), ["package_b"])


            

Reported by Pylint.

Unable to import 'package_a.subpackage'
Error

Line: 149 Column: 9

              
    def test_is_from_package(self):
        """is_from_package should work for objects and modules"""
        import package_a.subpackage

        buffer = BytesIO()
        obj = package_a.subpackage.PackageASubpackageObject()

        with PackageExporter(buffer) as pe:

            

Reported by Pylint.

caffe2/python/operator_test/partition_ops_test.py
44 issues
Cell variable parts defined in loop
Error

Line: 56 Column: 34

              
            def sharding(x):
                # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    for ind, v in enumerate(x):
                        suffix_shape = v.shape[len(x[0].shape):]
                        accum = []

            

Reported by Pylint.

Cell variable parts defined in loop
Error

Line: 58 Column: 32

                              # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    for ind, v in enumerate(x):
                        suffix_shape = v.shape[len(x[0].shape):]
                        accum = []
                        data = v.reshape((-1, ) + suffix_shape)


            

Reported by Pylint.

Cell variable pack defined in loop
Error

Line: 64 Column: 28

                                      accum = []
                        data = v.reshape((-1, ) + suffix_shape)

                        if pack and ind == 0:
                            data = data // parts

                        for j, s in enumerate(shards):
                            if s == i:
                                accum.append(data[j])

            

Reported by Pylint.

Cell variable parts defined in loop
Error

Line: 65 Column: 44

                                      data = v.reshape((-1, ) + suffix_shape)

                        if pack and ind == 0:
                            data = data // parts

                        for j, s in enumerate(shards):
                            if s == i:
                                accum.append(data[j])


            

Reported by Pylint.

Cell variable suffix_shape defined in loop
Error

Line: 73 Column: 63

              
                        def join(a):
                            if not a:
                                return np.empty(shape=(0, ) + suffix_shape)
                            return np.stack(a)

                        out.append(join(accum))
                return out


            

Reported by Pylint.

Cell variable parts defined in loop
Error

Line: 140 Column: 34

              
            def sharding(x):
                # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    idx = 0
                    sharded_lengths = np.zeros(elements)
                    for ind, length in enumerate(lengths):

            

Reported by Pylint.

Cell variable parts defined in loop
Error

Line: 142 Column: 32

                              # numpy has proper modulo op that yields non-negative results
                shards = (x[0] % parts).reshape([-1])
                out = []
                for i in range(parts):
                    idx = 0
                    sharded_lengths = np.zeros(elements)
                    for ind, length in enumerate(lengths):
                        for _ in range(length):
                            if shards[idx] == i:

            

Reported by Pylint.

Cell variable elements defined in loop
Error

Line: 144 Column: 48

                              out = []
                for i in range(parts):
                    idx = 0
                    sharded_lengths = np.zeros(elements)
                    for ind, length in enumerate(lengths):
                        for _ in range(length):
                            if shards[idx] == i:
                                sharded_lengths[ind] += 1
                            idx += 1

            

Reported by Pylint.

Cell variable lengths defined in loop
Error

Line: 145 Column: 50

                              for i in range(parts):
                    idx = 0
                    sharded_lengths = np.zeros(elements)
                    for ind, length in enumerate(lengths):
                        for _ in range(length):
                            if shards[idx] == i:
                                sharded_lengths[ind] += 1
                            idx += 1
                    out.append(sharded_lengths)

            

Reported by Pylint.

Cell variable pack defined in loop
Error

Line: 157 Column: 28

                                      accum = []
                        data = v.reshape((-1, ) + suffix_shape)

                        if pack and ind == 0:
                            data = data // parts

                        for j, s in enumerate(shards):
                            if s == i:
                                accum.append(data[j])

            

Reported by Pylint.

caffe2/python/control.py
44 issues
Using the global statement
Error

Line: 30 Column: 5

              

def _get_next_step_name(control_name, base_name):
    global _current_idx, _used_step_names
    concat_name = '%s/%s' % (base_name, control_name)
    next_name = concat_name
    while next_name in _used_step_names:
        next_name = '%s_%d' % (concat_name, _current_idx)
        _current_idx += 1

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 40 Column: 15

                  return next_name


def _MakeList(input):
    """ input is a tuple.
    Example:
    (a, b, c)   --> [a, b, c]
    (a)         --> [a]
    ([a, b, c]) --> [a, b, c]

            

Reported by Pylint.

Access to a protected member _attr_dict of a client class
Error

Line: 204 Column: 31

                      else:
            last_cond = merged_net.__getattr__(relation)([last_cond, curr_cond])
        # merge attributes
        for k, v in viewitems(condition_nets[i]._attr_dict):
            merged_net._attr_dict[k] += v

    merged_net.AddExternalOutput(last_cond)

    return merged_net

            

Reported by Pylint.

Access to a protected member _attr_dict of a client class
Error

Line: 205 Column: 13

                          last_cond = merged_net.__getattr__(relation)([last_cond, curr_cond])
        # merge attributes
        for k, v in viewitems(condition_nets[i]._attr_dict):
            merged_net._attr_dict[k] += v

    merged_net.AddExternalOutput(last_cond)

    return merged_net


            

Reported by Pylint.

Constant name "_current_idx" doesn't conform to UPPER_CASE naming style
Error

Line: 25 Column: 1

              
# Used to generate names of the steps created by the control functions.
# It is actually the internal index of these steps.
_current_idx = 1
_used_step_names = set()


def _get_next_step_name(control_name, base_name):
    global _current_idx, _used_step_names

            

Reported by Pylint.

Constant name "_current_idx" doesn't conform to UPPER_CASE naming style
Error

Line: 30 Column: 5

              

def _get_next_step_name(control_name, base_name):
    global _current_idx, _used_step_names
    concat_name = '%s/%s' % (base_name, control_name)
    next_name = concat_name
    while next_name in _used_step_names:
        next_name = '%s_%d' % (concat_name, _current_idx)
        _current_idx += 1

            

Reported by Pylint.

Constant name "_used_step_names" doesn't conform to UPPER_CASE naming style
Error

Line: 30 Column: 5

              

def _get_next_step_name(control_name, base_name):
    global _current_idx, _used_step_names
    concat_name = '%s/%s' % (base_name, control_name)
    next_name = concat_name
    while next_name in _used_step_names:
        next_name = '%s_%d' % (concat_name, _current_idx)
        _current_idx += 1

            

Reported by Pylint.

Function name "_MakeList" doesn't conform to snake_case naming style
Error

Line: 40 Column: 1

                  return next_name


def _MakeList(input):
    """ input is a tuple.
    Example:
    (a, b, c)   --> [a, b, c]
    (a)         --> [a]
    ([a, b, c]) --> [a, b, c]

            

Reported by Pylint.

Unnecessary "elif" after "raise"
Error

Line: 47 Column: 5

                  (a)         --> [a]
    ([a, b, c]) --> [a, b, c]
    """
    if len(input) == 0:
        raise ValueError(
            'input cannot be empty.')
    elif len(input) == 1:
        output = input[0]
        if not isinstance(output, list):

            

Reported by Pylint.

Function name "_IsNets" doesn't conform to snake_case naming style
Error

Line: 59 Column: 1

                  return output


def _IsNets(nets_or_steps):
    if isinstance(nets_or_steps, list):
        return all(isinstance(n, core.Net) for n in nets_or_steps)
    else:
        return isinstance(nets_or_steps, core.Net)


            

Reported by Pylint.

torch/quantization/ns/graph_matcher.py
44 issues
Attempted relative import beyond top-level package
Error

Line: 11 Column: 1

              from torch.fx.graph import Graph, Node

from torch.quantization.utils import getattr_from_fqn
from .ns_types import NSSubgraph, NSNodeTargetType
from .mappings import (
    get_base_name_to_sets_of_related_ops,
    get_unmatchable_types_map,
)
from .pattern_utils import (

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 12 Column: 1

              
from torch.quantization.utils import getattr_from_fqn
from .ns_types import NSSubgraph, NSNodeTargetType
from .mappings import (
    get_base_name_to_sets_of_related_ops,
    get_unmatchable_types_map,
)
from .pattern_utils import (
    get_type_a_related_to_b,

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 16 Column: 1

                  get_base_name_to_sets_of_related_ops,
    get_unmatchable_types_map,
)
from .pattern_utils import (
    get_type_a_related_to_b,
    get_reversed_fusions,
    end_node_matches_reversed_fusion,
)
from torch.quantization import (

            

Reported by Pylint.

Unused variable 'key'
Error

Line: 137 Column: 17

                          for inner_arg in arg:
                self._recursively_add_node_arg_to_stack(inner_arg)
        elif isinstance(arg, torch.fx.immutable_collections.immutable_dict):
            for key, value in arg.items():
                self._recursively_add_node_arg_to_stack(value)

    def _is_matchable(self, node: Node) -> bool:
        if node.op == 'call_function':
            return not (node.target in self.non_matchable_functions)

            

Reported by Pylint.

Unnecessary pass statement
Error

Line: 158 Column: 5

                  """
    Exception raised when two graphs cannot be matched.
    """
    pass

class SubgraphTypeRelationship(enum.Enum):
    # same type, known
    # example: F.linear and F.linear, or nn.Conv2d and nn.Conv2d
    EQUAL = enum.auto()

            

Reported by Pylint.

TODO(next): make this code handle matching by what is before the base op
Error

Line: 183 Column: 3

                  node_a = subgraph_a.base_op_node
    node_b = subgraph_b.base_op_node

    # TODO(next): make this code handle matching by what is before the base op
    if node_a.op != node_b.op:
        if not (
            node_a.op in ('call_function', 'call_method') and
            node_b.op in ('call_function', 'call_method')
        ):

            

Reported by Pylint.

TODO(future PR): check for matches start_op_node and base_op_node
Error

Line: 211 Column: 3

                          elif (not node_a_has_prev) and (not node_b_has_prev):
                return SubgraphTypeRelationship.EQUAL
            else:
                # TODO(future PR): check for matches start_op_node and base_op_node
                return SubgraphTypeRelationship.EQUAL

        if key in type_a_related_to_b:
            return SubgraphTypeRelationship.RELATED_BUT_NOT_EQUAL
        else:

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import collections
import enum

import torch
toq = torch.ops.quantized

from torch.fx import GraphModule
from torch.fx.graph import Graph, Node


            

Reported by Pylint.

Import "from torch.fx import GraphModule" should be placed at the top of the module
Error

Line: 7 Column: 1

              import torch
toq = torch.ops.quantized

from torch.fx import GraphModule
from torch.fx.graph import Graph, Node

from torch.quantization.utils import getattr_from_fqn
from .ns_types import NSSubgraph, NSNodeTargetType
from .mappings import (

            

Reported by Pylint.

Import "from torch.fx.graph import Graph, Node" should be placed at the top of the module
Error

Line: 8 Column: 1

              toq = torch.ops.quantized

from torch.fx import GraphModule
from torch.fx.graph import Graph, Node

from torch.quantization.utils import getattr_from_fqn
from .ns_types import NSSubgraph, NSNodeTargetType
from .mappings import (
    get_base_name_to_sets_of_related_ops,

            

Reported by Pylint.