The following issues were found:

caffe2/python/ideep/spatial_bn_op_test.py
38 issues
Unable to import 'hypothesis'
Error

Line: 6 Column: 1

              


from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np
import unittest
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 7 Column: 1

              

from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np
import unittest
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.ideep_test_util as mu

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'use_mkldnn' member
Error

Line: 15 Column: 22

              import caffe2.python.ideep_test_util as mu


@unittest.skipIf(not workspace.C.use_mkldnn, "No MKLDNN support.")
class TestSpatialBN(hu.HypothesisTestCase):
    @given(size=st.integers(7, 10),
           input_channels=st.integers(7, 10),
           batch_size=st.integers(1, 3),
           seed=st.integers(0, 65535),

            

Reported by Pylint.

Unused argument 'seed'
Warning

Line: 27 Column: 53

                         **mu.gcs)
    @settings(deadline=1000)
    def test_spatialbn_test_mode(
            self, size, input_channels, batch_size, seed, order, epsilon,
            inplace, gc, dc):
        op = core.CreateOperator(
            "SpatialBN",
            ["X", "scale", "bias", "mean", "var"],
            ["X" if inplace else "Y"],

            

Reported by Pylint.

Unused argument 'gc'
Warning

Line: 28 Column: 22

                  @settings(deadline=1000)
    def test_spatialbn_test_mode(
            self, size, input_channels, batch_size, seed, order, epsilon,
            inplace, gc, dc):
        op = core.CreateOperator(
            "SpatialBN",
            ["X", "scale", "bias", "mean", "var"],
            ["X" if inplace else "Y"],
            order=order,

            

Reported by Pylint.

Unused variable 'reference_spatialbn_test'
Warning

Line: 38 Column: 9

                          epsilon=epsilon
        )

        def reference_spatialbn_test(X, scale, bias, mean, var):
            if order == "NCHW":
                scale = scale[np.newaxis, :, np.newaxis, np.newaxis]
                bias = bias[np.newaxis, :, np.newaxis, np.newaxis]
                mean = mean[np.newaxis, :, np.newaxis, np.newaxis]
                var = var[np.newaxis, :, np.newaxis, np.newaxis]

            

Reported by Pylint.

Unused argument 'seed'
Warning

Line: 68 Column: 53

                         inplace=st.sampled_from([True, False]),
           **mu.gcs)
    def test_spatialbn_train_mode(
            self, size, input_channels, batch_size, seed, order, epsilon,
            inplace, gc, dc):
        print("dc0: {}, dc1: {}".format(dc[0], dc[1]))
        op = core.CreateOperator(
            "SpatialBN",
            ["X", "scale", "bias", "running_mean", "running_var"],

            

Reported by Pylint.

Unused argument 'gc'
Warning

Line: 69 Column: 22

                         **mu.gcs)
    def test_spatialbn_train_mode(
            self, size, input_channels, batch_size, seed, order, epsilon,
            inplace, gc, dc):
        print("dc0: {}, dc1: {}".format(dc[0], dc[1]))
        op = core.CreateOperator(
            "SpatialBN",
            ["X", "scale", "bias", "running_mean", "running_var"],
            ["X" if inplace else "Y",

            

Reported by Pylint.

TODO: It looks like IDEEP spatial_bn op outputs save_var (output[4])
Warning

Line: 91 Column: 3

                      if order == "NHWC":
            X = X.swapaxes(1, 2).swapaxes(2, 3)

        # TODO: It looks like IDEEP spatial_bn op outputs save_var (output[4])
        # as the reciprocal of CPU op's output. Need to check back and add
        # output[4] for comparison
        self.assertDeviceChecks(dc, op, [X, scale, bias, running_mean, running_var],
            [0, 1, 2, 3])


            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 107 Column: 17

                  @settings(deadline=None, max_examples=50)
    def test_spatialbn_train_mode_gradient_check(
            self, size, input_channels, batch_size, seed, order, epsilon,
            gc, dc):
        op = core.CreateOperator(
            "SpatialBN",
            ["X", "scale", "bias", "mean", "var"],
            ["Y", "mean", "var", "saved_mean", "saved_var"],
            order=order,

            

Reported by Pylint.

benchmarks/operator_benchmark/pt/batchnorm_test.py
38 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

              
import operator_benchmark as op_bench
import torch
import torch.nn.functional as F


"""Microbenchmarks for batchnorm operator."""

# Benchmark cudnn if available

            

Reported by Pylint.

Unable to import 'torch.nn.functional'
Error

Line: 4 Column: 1

              
import operator_benchmark as op_bench
import torch
import torch.nn.functional as F


"""Microbenchmarks for batchnorm operator."""

# Benchmark cudnn if available

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 24 Column: 51

                      return [(*config, dict(cudnn=False)) for config in configs]


batchnorm_configs_short = cudnn_benchmark_configs(op_bench.config_list(
    attr_names=["M", "N", "K"],
    attrs=[
        [1, 256, 3136],
    ],
    cross_product_configs={

            

Reported by Pylint.

Module 'operator_benchmark' has no 'cross_product_configs' member
Error

Line: 36 Column: 50

                  tags=["short"]
))

batchnorm_configs_long = cudnn_benchmark_configs(op_bench.cross_product_configs(
    M=[2, 128],
    N=[8192, 2048],
    K=[1],
    device=['cpu', 'cuda'],
    training=[True, False],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 46 Column: 26

              ))


class BatchNormBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, M, N, K, device, training, cudnn):
        self.inputs = {
            "input_one": torch.rand(M, N, K, device=device, requires_grad=self.auto_set()),
            "mean": torch.rand(N, device=device),
            "var": torch.rand(N, device=device),

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_test' member
Error

Line: 64 Column: 1

                          return F.batch_norm(input_one, mean, var, weight, bias, training)


op_bench.generate_pt_test(batchnorm_configs_short + batchnorm_configs_long, BatchNormBenchmark)
op_bench.generate_pt_gradient_test(batchnorm_configs_short + batchnorm_configs_long, BatchNormBenchmark)


batchnorm1d_configs_short = cudnn_benchmark_configs(op_bench.config_list(
    attr_names=["N", "C"],

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_gradient_test' member
Error

Line: 65 Column: 1

              

op_bench.generate_pt_test(batchnorm_configs_short + batchnorm_configs_long, BatchNormBenchmark)
op_bench.generate_pt_gradient_test(batchnorm_configs_short + batchnorm_configs_long, BatchNormBenchmark)


batchnorm1d_configs_short = cudnn_benchmark_configs(op_bench.config_list(
    attr_names=["N", "C"],
    attrs=[

            

Reported by Pylint.

Module 'operator_benchmark' has no 'config_list' member
Error

Line: 68 Column: 53

              op_bench.generate_pt_gradient_test(batchnorm_configs_short + batchnorm_configs_long, BatchNormBenchmark)


batchnorm1d_configs_short = cudnn_benchmark_configs(op_bench.config_list(
    attr_names=["N", "C"],
    attrs=[
        [3136, 256],
    ],
    cross_product_configs={

            

Reported by Pylint.

Module 'operator_benchmark' has no 'cross_product_configs' member
Error

Line: 80 Column: 52

                  tags=["short"]
))

batchnorm1d_configs_long = cudnn_benchmark_configs(op_bench.cross_product_configs(
    N=[2, 128],
    C=[8192, 2048],
    device=['cpu', 'cuda'],
    training=[True, False],
    tags=["long"]

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 88 Column: 28

                  tags=["long"]
))

class BatchNorm1dBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, N, C, device, training, cudnn):
        self.inputs = {
            "input_one": torch.rand(N, C, device=device, requires_grad=self.auto_set()),
            "mean": torch.rand(C, device=device),
            "var": torch.rand(C, device=device),

            

Reported by Pylint.

torch/nn/modules/lazy.py
38 issues
Unable to import 'typing_extensions'
Error

Line: 2 Column: 1

              import itertools
from typing_extensions import Protocol
import warnings

import torch
from ..parameter import is_lazy


class _LazyProtocol(Protocol):

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

              import warnings

import torch
from ..parameter import is_lazy


class _LazyProtocol(Protocol):
    """This is to avoid errors with mypy checks for
    The attributes in a mixin:

            

Reported by Pylint.

Unused argument 'hook'
Warning

Line: 14 Column: 50

                  The attributes in a mixin:
    https://mypy.readthedocs.io/en/latest/more_types.html#mixin-classes
    """
    def _register_load_state_dict_pre_hook(self, hook):
        ...

    def register_forward_pre_hook(self, hook):
        ...


            

Reported by Pylint.

Unused argument 'hook'
Warning

Line: 17 Column: 41

                  def _register_load_state_dict_pre_hook(self, hook):
        ...

    def register_forward_pre_hook(self, hook):
        ...

    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):

            

Reported by Pylint.

Unused argument 'local_metadata'
Warning

Line: 21 Column: 39

                      ...

    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...

            

Reported by Pylint.

Unused argument 'state_dict'
Warning

Line: 21 Column: 19

                      ...

    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...

            

Reported by Pylint.

Unused argument 'strict'
Warning

Line: 21 Column: 55

                      ...

    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...

            

Reported by Pylint.

Unused argument 'prefix'
Warning

Line: 21 Column: 31

                      ...

    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...

            

Reported by Pylint.

Unused argument 'unexpected_keys'
Warning

Line: 22 Column: 27

              
    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...


            

Reported by Pylint.

Unused argument 'missing_keys'
Warning

Line: 22 Column: 13

              
    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        ...

    def _get_name(self):
        ...


            

Reported by Pylint.

benchmarks/operator_benchmark/benchmark_utils.py
38 issues
Undefined variable 'argparse'
Error

Line: 28 Column: 15

                  elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')

def numpy_random(dtype, *shapes):
    """ Return a random numpy tensor of the provided dtype.
        Args:
            shapes: int or a sequence of ints to defining the shapes of the tensor

            

Reported by Pylint.

String statement has no effect
Warning

Line: 8 Column: 1

              import bisect


"""Performance microbenchmarks's utils.

This module contains utilities for writing microbenchmark tests.
"""

# Here are the reserved keywords in the benchmark suite

            

Reported by Pylint.

TODO: consider more complex/custom dynamic ranges for
Warning

Line: 39 Column: 3

                      Return:
            numpy tensor of dtype
    """
    # TODO: consider more complex/custom dynamic ranges for
    # comprehensive test coverage.
    return np.random.rand(*shapes).astype(dtype)


def set_omp_threads(num_threads):

            

Reported by Pylint.

TODO(mingzhe0908) remove the conversion to list.
Warning

Line: 113 Column: 3

                      tmp_results = [{key : value} for value in values]
        configs_attrs_list.append(tmp_results)

    # TODO(mingzhe0908) remove the conversion to list.
    # itertools.product produces an iterator that produces element on the fly
    # while converting to a list produces everything at the same time.
    generated_configs = list(itertools.product(*configs_attrs_list))
    return generated_configs


            

Reported by Pylint.

TODO(mingzhe0908):
Warning

Line: 169 Column: 3

                  for inputs in configs['attrs']:
        tmp_result = [{configs['attr_names'][i] : input_value}
                      for i, input_value in enumerate(inputs)]
        # TODO(mingzhe0908):
        # If multiple 'tags' were provided, do they get concat?
        # If a config has both ['short', 'medium'], it should match
        # both 'short' and 'medium' tag-filter?
        tmp_result.append({'tags' : '_'.join(configs['tags'])})
        if cross_configs:

            

Reported by Pylint.

TODO(mingzhe09088): cache the results to avoid recalculation overhead
Warning

Line: 212 Column: 3

                  def _random_sample(self, key, values, weights):
        """ given values and weights, this function randomly sample values based their weights
        """
        # TODO(mingzhe09088): cache the results to avoid recalculation overhead
        assert len(values) == len(weights)
        _distribution_func_vals = self._distribution_func(key, weights)
        x = random.random()
        idx = bisect.bisect(_distribution_func_vals, x)


            

Reported by Pylint.

Unused variable 'i'
Warning

Line: 276 Column: 9

              
    configs_attrs_list = []
    randomsample = RandomSample(configs)
    for i in range(configs["total_samples"]):
        tmp_attr_list = randomsample.get_one_set_of_inputs()
        tmp_attr_list.append({"tags" : '_'.join(configs["tags"])})
        configs_attrs_list.append(tmp_attr_list)
    return configs_attrs_list


            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              import numpy as np
import itertools
import random
import os
import bisect


"""Performance microbenchmarks's utils.


            

Reported by Pylint.

standard import "import itertools" should be placed before "import numpy as np"
Convention

Line: 2 Column: 1

              import numpy as np
import itertools
import random
import os
import bisect


"""Performance microbenchmarks's utils.


            

Reported by Pylint.

standard import "import random" should be placed before "import numpy as np"
Convention

Line: 3 Column: 1

              import numpy as np
import itertools
import random
import os
import bisect


"""Performance microbenchmarks's utils.


            

Reported by Pylint.

torch/distributed/rpc/internal.py
38 issues
Unable to import 'torch._C._distributed_rpc'
Error

Line: 12 Column: 1

              
import torch
import torch.distributed as dist
from torch._C._distributed_rpc import _get_current_rpc_agent


# Thread local tensor tables to store tensors while pickling torch.Tensor
# objects
_thread_local_tensor_tables = threading.local()

            

Reported by Pylint.

Module 'torch.autograd' has no '_RecordFunction' member
Error

Line: 266 Column: 10

                  profile_key = "rpc_{}#{}({} -> {})".format(
        exec_type.value, str(func_name), current_worker_name, dest_worker_name
    )
    rf = torch.autograd._RecordFunction()  # type: ignore[attr-defined]
    torch.autograd._run_before_callbacks(rf, profile_key)  # type: ignore[attr-defined]
    return rf


PythonUDF = collections.namedtuple("PythonUDF", ["func", "args", "kwargs"])

            

Reported by Pylint.

Module 'torch.autograd' has no '_run_before_callbacks' member
Error

Line: 267 Column: 5

                      exec_type.value, str(func_name), current_worker_name, dest_worker_name
    )
    rf = torch.autograd._RecordFunction()  # type: ignore[attr-defined]
    torch.autograd._run_before_callbacks(rf, profile_key)  # type: ignore[attr-defined]
    return rf


PythonUDF = collections.namedtuple("PythonUDF", ["func", "args", "kwargs"])
RemoteException = collections.namedtuple("RemoteException", ["msg", "exception_type"])

            

Reported by Pylint.

Using the global statement
Warning

Line: 54 Column: 9

              
    @classmethod
    def _tensor_receiver(cls, tensor_index):
        global _thread_local_tensor_tables
        return _thread_local_tensor_tables.recv_tables[tensor_index]

    def _tensor_reducer(self, tensor):
        global _thread_local_tensor_tables
        _thread_local_tensor_tables.send_tables.append(tensor)

            

Reported by Pylint.

Using the global statement
Warning

Line: 58 Column: 9

                      return _thread_local_tensor_tables.recv_tables[tensor_index]

    def _tensor_reducer(self, tensor):
        global _thread_local_tensor_tables
        _thread_local_tensor_tables.send_tables.append(tensor)
        tensor_index = len(_thread_local_tensor_tables.send_tables) - 1
        return (_InternalRPCPickler._tensor_receiver, (tensor_index,))

    @classmethod

            

Reported by Pylint.

Access to a protected member _deserialize of a client class
Warning

Line: 65 Column: 16

              
    @classmethod
    def _py_rref_receiver(cls, rref_fork_data):
        return dist.rpc.PyRRef._deserialize(rref_fork_data)

    def _py_rref_reducer(self, py_rref):
        rref_fork_data = py_rref._serialize()
        return (_InternalRPCPickler._py_rref_receiver, (rref_fork_data,))


            

Reported by Pylint.

Access to a protected member _serialize of a client class
Warning

Line: 68 Column: 26

                      return dist.rpc.PyRRef._deserialize(rref_fork_data)

    def _py_rref_reducer(self, py_rref):
        rref_fork_data = py_rref._serialize()
        return (_InternalRPCPickler._py_rref_receiver, (rref_fork_data,))

    def _rref_reducer(self, rref):
        return self._py_rref_reducer(rref)


            

Reported by Pylint.

Using the global statement
Warning

Line: 126 Column: 9

                          p.dispatch_table[class_name] = self._class_reducer_dict[class_name]  # type: ignore[index]

        # save _thread_local_tensor_tables.send_tables if it is in nested call
        global _thread_local_tensor_tables
        if hasattr(_thread_local_tensor_tables, "send_tables"):
            old_send_tables = _thread_local_tensor_tables.send_tables
        else:
            old_send_tables = None
        _thread_local_tensor_tables.send_tables = []

            

Reported by Pylint.

Using the global statement
Warning

Line: 150 Column: 9

                      Deserialize binary string + tensor table to original obj
        """
        # save _thread_local_tensor_tables.recv_tables if it is in nested call
        global _thread_local_tensor_tables
        if hasattr(_thread_local_tensor_tables, "recv_tables"):
            old_recv_tables = _thread_local_tensor_tables.recv_tables
        else:
            old_recv_tables = None
        _thread_local_tensor_tables.recv_tables = tensor_table

            

Reported by Pylint.

Catching too general exception Exception
Warning

Line: 205 Column: 12

                      if isinstance(python_udf, AttributeError):
            raise python_udf
        result = python_udf.func(*python_udf.args, **python_udf.kwargs)
    except Exception as e:
        # except str = exception info + traceback string
        except_str = (
            f"On {_get_current_rpc_agent().get_worker_info()}:\n"
            f"{repr(e)}\n{traceback.format_exc()}"
        )

            

Reported by Pylint.

caffe2/python/operator_test/gather_ranges_op_test.py
38 issues
Unable to import 'hypothesis'
Error

Line: 7 Column: 1

              import caffe2.python.serialized_test.serialized_test_util as serial
import numpy as np
from caffe2.python import core, workspace
from hypothesis import given, settings, strategies as st


def batched_boarders_and_data(
    data_min_size=5,
    data_max_size=10,

            

Reported by Pylint.

Redefining built-in 'range'
Warning

Line: 114 Column: 13

                  output = []
    for example_ranges in ranges:
        length = 0
        for range in example_ranges:
            assert len(range) == 2
            output.extend(data[range[0] : range[0] + range[1]])
            length += range[1]
        lengths.append(length)
    return output, lengths

            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 170 Column: 57

              class TestGatherRanges(serial.SerializedTestCase):
    @given(boarders_and_data=batched_boarders_and_data(), **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_gather_ranges(self, boarders_and_data, gc, dc):
        boarders, data = boarders_and_data

        def boarders_to_range(boarders):
            assert len(boarders) == 2
            boarders = sorted(boarders)

            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 191 Column: 59

              
    @given(tensor_splits=_tensor_splits(), **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_gather_ranges_split(self, tensor_splits, gc, dc):
        data, ranges, lengths, _ = tensor_splits

        self.assertReferenceChecks(
            device_option=gc,
            op=core.CreateOperator(

            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 207 Column: 68

                      )

    @given(tensor_splits=_tensor_splits(), **hu.gcs_cpu_only)
    def test_gather_ranges_with_key_split(self, tensor_splits, gc, dc):
        data, ranges, lengths, key = tensor_splits

        self.assertReferenceChecks(
            device_option=gc,
            op=core.CreateOperator(

            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 237 Column: 57

              
    @given(tensor_splits=_bad_tensor_splits(), **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_empty_range_check(self, tensor_splits, gc, dc):
        data, ranges, lengths, key = tensor_splits

        workspace.FeedBlob("data", data)
        workspace.FeedBlob("ranges", ranges)
        workspace.FeedBlob("key", key)

            

Reported by Pylint.

Unused argument 'gc'
Warning

Line: 237 Column: 53

              
    @given(tensor_splits=_bad_tensor_splits(), **hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_empty_range_check(self, tensor_splits, gc, dc):
        data, ranges, lengths, key = tensor_splits

        workspace.FeedBlob("data", data)
        workspace.FeedBlob("ranges", ranges)
        workspace.FeedBlob("key", key)

            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              

import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import numpy as np
from caffe2.python import core, workspace
from hypothesis import given, settings, strategies as st



            

Reported by Pylint.

Too many arguments (8/5)
Refactor

Line: 10 Column: 1

              from hypothesis import given, settings, strategies as st


def batched_boarders_and_data(
    data_min_size=5,
    data_max_size=10,
    examples_min_number=1,
    examples_max_number=4,
    example_min_size=1,

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 10 Column: 1

              from hypothesis import given, settings, strategies as st


def batched_boarders_and_data(
    data_min_size=5,
    data_max_size=10,
    examples_min_number=1,
    examples_max_number=4,
    example_min_size=1,

            

Reported by Pylint.

test/test_throughput_benchmark.py
37 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              
import torch
from torch.utils import ThroughputBenchmark
from torch.testing import assert_allclose

from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):

            

Reported by Pylint.

Unable to import 'torch.utils'
Error

Line: 3 Column: 1

              
import torch
from torch.utils import ThroughputBenchmark
from torch.testing import assert_allclose

from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):

            

Reported by Pylint.

Unable to import 'torch.testing'
Error

Line: 4 Column: 1

              
import torch
from torch.utils import ThroughputBenchmark
from torch.testing import assert_allclose

from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 6 Column: 1

              from torch.utils import ThroughputBenchmark
from torch.testing import assert_allclose

from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)

            

Reported by Pylint.

Redefining built-in 'input'
Warning

Line: 51 Column: 13

                          inputs.append([torch.randn(B, D_in), torch.randn(B, D_in)])
        bench = ThroughputBenchmark(module)

        for input in inputs:
            # can do both args and kwargs here
            bench.add_input(input[0], x2=input[1])

        for i in range(NUM_INPUTS):
            # or just unpack the list of inputs

            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              
import torch
from torch.utils import ThroughputBenchmark
from torch.testing import assert_allclose

from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):

            

Reported by Pylint.

Too few public methods (1/2)
Refactor

Line: 8 Column: 1

              
from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(2 * H, D_out)


            

Reported by Pylint.

Missing class docstring
Convention

Line: 8 Column: 1

              
from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(2 * H, D_out)


            

Reported by Pylint.

Argument name "D_in" doesn't conform to snake_case naming style
Convention

Line: 9 Column: 5

              from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(2 * H, D_out)

    @torch.jit.script_method

            

Reported by Pylint.

Argument name "H" doesn't conform to snake_case naming style
Convention

Line: 9 Column: 5

              from torch.testing._internal.common_utils import run_tests, TestCase, TemporaryFileName

class TwoLayerNet(torch.jit.ScriptModule):
    def __init__(self, D_in, H, D_out):
        super(TwoLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(2 * H, D_out)

    @torch.jit.script_method

            

Reported by Pylint.

torch/fx/experimental/unification/core.py
37 issues
Attempted relative import beyond top-level package
Error

Line: 4 Column: 1

              from collections.abc import Iterator  # type: ignore[import]
from functools import partial

from .unification_tools import assoc  # type: ignore[import]
from .utils import transitive_get as walk
from .variable import isvar
from .dispatch import dispatch

################

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 5 Column: 1

              from functools import partial

from .unification_tools import assoc  # type: ignore[import]
from .utils import transitive_get as walk
from .variable import isvar
from .dispatch import dispatch

################
# Reificiation #

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

              
from .unification_tools import assoc  # type: ignore[import]
from .utils import transitive_get as walk
from .variable import isvar
from .dispatch import dispatch

################
# Reificiation #
################

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              from .unification_tools import assoc  # type: ignore[import]
from .utils import transitive_get as walk
from .variable import isvar
from .dispatch import dispatch

################
# Reificiation #
################


            

Reported by Pylint.

Too many positional arguments for function call
Error

Line: 64 Column: 13

                  if len(u) != len(v):
        return False
    for uu, vv in zip(u, v):  # avoiding recursion
        s = unify(uu, vv, s)
        if s is False:
            return False
    return s
#
# @dispatch((set, frozenset), (set, frozenset), dict)

            

Reported by Pylint.

function already defined line 96
Error

Line: 114 Column: 1

              unify

@dispatch(object, object)  # type: ignore[no-redef]
def unify(u, v):
    return unify(u, v, {})

            

Reported by Pylint.

Too many positional arguments for function call
Error

Line: 115 Column: 12

              
@dispatch(object, object)  # type: ignore[no-redef]
def unify(u, v):
    return unify(u, v, {})

            

Reported by Pylint.

Statement seems to have no effect
Warning

Line: 17 Column: 1

              def _reify(t, s):
    return map(partial(reify, s=s), t)
    # return (reify(arg, s) for arg in t)
_reify

@dispatch(tuple, dict)  # type: ignore[no-redef]
def _reify(t, s):
    return tuple(reify(iter(t), s))
_reify

            

Reported by Pylint.

Statement seems to have no effect
Warning

Line: 22 Column: 1

              @dispatch(tuple, dict)  # type: ignore[no-redef]
def _reify(t, s):
    return tuple(reify(iter(t), s))
_reify

@dispatch(list, dict)  # type: ignore[no-redef]
def _reify(t, s):
    return list(reify(iter(t), s))
_reify

            

Reported by Pylint.

Statement seems to have no effect
Warning

Line: 27 Column: 1

              @dispatch(list, dict)  # type: ignore[no-redef]
def _reify(t, s):
    return list(reify(iter(t), s))
_reify

@dispatch(dict, dict)  # type: ignore[no-redef]
def _reify(d, s):
    return dict((k, reify(v, s)) for k, v in d.items())
_reify

            

Reported by Pylint.

caffe2/python/models/seq2seq/translate.py
37 issues
Instance of 'Seq2SeqModelCaffe2EnsembleDecoderBase' has no 'models' member
Error

Line: 56 Column: 13

                  def load_models(self):
        db_reader = 'reader'
        for model, scope_name in zip(
            self.models,
            self.decoder_scope_names,
        ):
            params_for_current_model = [
                param
                for param in self.model.GetAllParams()

            

Reported by Pylint.

Instance of 'Seq2SeqModelCaffe2EnsembleDecoderBase' has no 'decoder_scope_names' member
Error

Line: 57 Column: 13

                      db_reader = 'reader'
        for model, scope_name in zip(
            self.models,
            self.decoder_scope_names,
        ):
            params_for_current_model = [
                param
                for param in self.model.GetAllParams()
                if str(param).startswith(scope_name)

            

Reported by Pylint.

Instance of 'Seq2SeqModelCaffe2EnsembleDecoderBase' has no 'model' member
Error

Line: 61 Column: 30

                      ):
            params_for_current_model = [
                param
                for param in self.model.GetAllParams()
                if str(param).startswith(scope_name)
            ]
            assert workspace.RunOperatorOnce(core.CreateOperator(
                'CreateDB',
                [], [db_reader],

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 78 Column: 25

                              add_prefix=scope_name + '/',
                strip_prefix='gpu_0/',
            ))
            logger.info('Model {} is loaded from a checkpoint {}'.format(
                scope_name, self.get_model_file(model)))


class Seq2SeqModelCaffe2EnsembleDecoder(Seq2SeqModelCaffe2EnsembleDecoderBase):


            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 536 Column: 17

                      args.source_corpus,
        args.unk_threshold,
    )
    logger.info('Source vocab size {}'.format(len(source_vocab)))
    target_vocab = seq2seq_util.gen_vocab(
        args.target_corpus,
        args.unk_threshold,
    )
    inversed_target_vocab = {v: k for (k, v) in viewitems(target_vocab)}

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 542 Column: 17

                      args.unk_threshold,
    )
    inversed_target_vocab = {v: k for (k, v) in viewitems(target_vocab)}
    logger.info('Target vocab size {}'.format(len(target_vocab)))

    decoder = Seq2SeqModelCaffe2EnsembleDecoder(
        translate_params=dict(
            ensemble_models=[dict(
                source_vocab=source_vocab,

            

Reported by Pylint.

Unused variable 'alignment'
Error

Line: 562 Column: 22

                          line,
            source_vocab,
        )
        translation, alignment, _ = decoder.decode(
            numerized_source_sentence,
            2 * len(numerized_source_sentence) + 5,
        )
        print(' '.join([inversed_target_vocab[tid] for tid in translation]))


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              ## @package translate
# Module caffe2.python.models.seq2seq.translate





from abc import ABCMeta, abstractmethod
import argparse

            

Reported by Pylint.

standard import "import logging" should be placed before "from future.utils import viewitems"
Error

Line: 11 Column: 1

              from abc import ABCMeta, abstractmethod
import argparse
from future.utils import viewitems
import logging
import numpy as np
import sys

from caffe2.python import core, rnn_cell, workspace
from caffe2.python.models.seq2seq.beam_search import BeamSearchForwardOnly

            

Reported by Pylint.

standard import "import sys" should be placed before "from future.utils import viewitems"
Error

Line: 13 Column: 1

              from future.utils import viewitems
import logging
import numpy as np
import sys

from caffe2.python import core, rnn_cell, workspace
from caffe2.python.models.seq2seq.beam_search import BeamSearchForwardOnly
from caffe2.python.models.seq2seq.seq2seq_model_helper import Seq2SeqModelHelper
import caffe2.python.models.seq2seq.seq2seq_util as seq2seq_util

            

Reported by Pylint.

test/test_jit_fuser_legacy.py
37 issues
Unused import MiLSTMCell from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import LSTMCellF from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import LSTMCellC from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import shell from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import JitTestCase from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import get_milstm_inputs from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import get_lstm_inputs from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import enable_cpu_fuser from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import all_backward_graphs from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.

Unused import backward_graph from wildcard import
Error

Line: 3 Column: 1

              import sys
sys.argv.append("--jit_executor=legacy")
from test_jit_fuser import *  # noqa: F403

if __name__ == '__main__':
    run_tests()

            

Reported by Pylint.