The following issues were found:

test/test_cpp_extensions_aot.py
62 issues
Unable to import 'torch.testing._internal.common_utils'
Error

Line: 4 Column: 1

              import os
import unittest

import torch.testing._internal.common_utils as common
from torch.testing._internal.common_utils import IS_WINDOWS
from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 5 Column: 1

              import unittest

import torch.testing._internal.common_utils as common
from torch.testing._internal.common_utils import IS_WINDOWS
from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension


            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_cuda'
Error

Line: 6 Column: 1

              
import torch.testing._internal.common_utils as common
from torch.testing._internal.common_utils import IS_WINDOWS
from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension

try:

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 7 Column: 1

              import torch.testing._internal.common_utils as common
from torch.testing._internal.common_utils import IS_WINDOWS
from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension

try:
    import pytest

            

Reported by Pylint.

Unable to import 'torch.backends.cudnn'
Error

Line: 8 Column: 1

              from torch.testing._internal.common_utils import IS_WINDOWS
from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension

try:
    import pytest
    HAS_PYTEST = True

            

Reported by Pylint.

Unable to import 'torch.utils.cpp_extension'
Error

Line: 9 Column: 1

              from torch.testing._internal.common_cuda import TEST_CUDA
import torch
import torch.backends.cudnn
import torch.utils.cpp_extension

try:
    import pytest
    HAS_PYTEST = True
except ImportError as e:

            

Reported by Pylint.

Unable to import 'torch_test_cpp_extension.cuda'
Error

Line: 73 Column: 9

              
    @unittest.skipIf(not TEST_CUDA, "CUDA not found")
    def test_cuda_extension(self):
        import torch_test_cpp_extension.cuda as cuda_extension

        x = torch.zeros(100, device="cuda", dtype=torch.float32)
        y = torch.zeros(100, device="cuda", dtype=torch.float32)

        z = cuda_extension.sigmoid_add(x, y).cpu()

            

Reported by Pylint.

Unable to import 'torch_test_cpp_extension.torch_library'
Error

Line: 192 Column: 9

              class TestTorchLibrary(common.TestCase):

    def test_torch_library(self):
        import torch_test_cpp_extension.torch_library  # noqa: F401

        def f(a: bool, b: bool):
            return torch.ops.torch_library.logical_and(a, b)

        self.assertTrue(f(True, True))

            

Reported by Pylint.

TODO: Rewrite these tests so that they can be collected via pytest without
Error

Line: 17 Column: 3

              except ImportError as e:
    HAS_PYTEST = False

# TODO: Rewrite these tests so that they can be collected via pytest without
# using run_test.py
try:
    if HAS_PYTEST:
        cpp_extension = pytest.importorskip("torch_test_cpp_extension.cpp")
        msnpu_extension = pytest.importorskip("torch_test_cpp_extension.msnpu")

            

Reported by Pylint.

Unused variable 'a'
Error

Line: 105 Column: 9

              
class TestMSNPUTensor(common.TestCase):
    def test_unregistered(self):
        a = torch.arange(0, 10, device='cpu')
        with self.assertRaisesRegex(RuntimeError, "Could not run"):
            b = torch.arange(0, 10, device='msnpu')

    def test_zeros(self):
        a = torch.empty(5, 5, device='cpu')

            

Reported by Pylint.
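
A minimal way to clear this, assuming the arange calls exist only for their side effects on dispatch, is to drop the unused bindings:

    def test_unregistered(self):
        # No names are bound: the tensors are created only to exercise dispatch,
        # which removes the unused-variable warning for 'a' (and the analogous 'b').
        torch.arange(0, 10, device='cpu')
        with self.assertRaisesRegex(RuntimeError, "Could not run"):
            torch.arange(0, 10, device='msnpu')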

caffe2/contrib/fakelowp/test/test_fc_nnpi_fp16.py
62 issues
Unable to import 'caffe2.python.fakelowp.init_shared_libs'
Error

Line: 4 Column: 1

              import numpy as np
import unittest

import caffe2.python.fakelowp.init_shared_libs  # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 5 Column: 1

              import unittest

import caffe2.python.fakelowp.init_shared_libs  # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 6 Column: 1

              
import caffe2.python.fakelowp.init_shared_libs  # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info

            

Reported by Pylint.

Unable to import 'caffe2.proto'
Error

Line: 7 Column: 1

              import caffe2.python.fakelowp.init_shared_libs  # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime

            

Reported by Pylint.

Unable to import 'caffe2.python'
Error

Line: 8 Column: 1

              from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
import caffe2.python.serialized_test.serialized_test_util as serial

            

Reported by Pylint.

Unable to import 'caffe2.python'
Error

Line: 9 Column: 1

              from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
import caffe2.python.serialized_test.serialized_test_util as serial


            

Reported by Pylint.

Unable to import 'caffe2.python.onnx.onnxifi'
Error

Line: 10 Column: 1

              from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])

            

Reported by Pylint.

Unable to import 'caffe2.python.fakelowp.test_utils'
Error

Line: 11 Column: 1

              from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])


            

Reported by Pylint.

Unable to import 'caffe2.python.serialized_test.serialized_test_util'
Error

Line: 13 Column: 1

              from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])

GLOW_MATMUL_RTOL = 0


            

Reported by Pylint.

Unused import caffe2.python.fakelowp.init_shared_libs
Error

Line: 4 Column: 1

              import numpy as np
import unittest

import caffe2.python.fakelowp.init_shared_libs  # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace

            

Reported by Pylint.
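
The import is intentional (it loads shared libraries as a side effect), and Pylint does not honor `# noqa`. One option, sketched here, is to keep the import and add a Pylint-specific suppression next to the existing flake8 one:

# Must happen before importing caffe2.python.*; imported only for its side effect.
import caffe2.python.fakelowp.init_shared_libs  # noqa: F401  # pylint: disable=unused-import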

caffe2/python/operator_test/pack_ops_test.py
62 issues
Unable to import 'hypothesis'
Error

Line: 10 Column: 1

              import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial

from hypothesis import given, settings
from hypothesis import strategies as st
import numpy as np
import time



            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 11 Column: 1

              import caffe2.python.serialized_test.serialized_test_util as serial

from hypothesis import given, settings
from hypothesis import strategies as st
import numpy as np
import time


class TestTensorPackOps(serial.SerializedTestCase):

            

Reported by Pylint.

Redefining built-in 'len'
Error

Line: 28 Column: 17

                              max_length = np.max(lengths)
            start = 0
            for idx in range(np.size(lengths)):
                len = lengths[idx] if max_length >= lengths[idx] else max_length
                chunk = data[start : start + len]
                pad_length = max_length - len

                # ((0, pad_length), (0, 0)) says add pad_length rows of padding
                # below chunk and 0 rows of padding elsewhere

            

Reported by Pylint.
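
A minimal fix, keeping the logic unchanged, is to rename the shadowing variable so it no longer hides the built-in len():

            max_length = np.max(lengths)
            start = 0
            for idx in range(np.size(lengths)):
                # 'seq_len' does not shadow the built-in len().
                seq_len = lengths[idx] if max_length >= lengths[idx] else max_length
                chunk = data[start : start + seq_len]
                pad_length = max_length - seq_len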

Unused argument 'dc'
Error

Line: 67 Column: 58

                  )
    @settings(deadline=None, max_examples=50)
    def test_pack_with_max_length_ops(
        self, num_seq, cell_size, max_length_buffer, gc, dc
    ):
        # create data
        lengths = np.arange(num_seq, dtype=np.int32) + 1
        num_cell = np.sum(lengths)
        data = np.zeros(num_cell * cell_size, dtype=np.float32)

            

Reported by Pylint.
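
Because @given(**hu.gcs) passes both gc and dc by keyword, the parameter cannot simply be dropped. A hedged option is a targeted suppression on the flagged line (or, where the test actually performs device checks, passing dc to assertDeviceChecks):

    def test_pack_with_max_length_ops(
        self, num_seq, cell_size, max_length_buffer, gc, dc  # pylint: disable=unused-argument
    ):
        ...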

Unused variable 'i'
Error

Line: 120 Column: 17

                              return data
            output = None
            start = 0
            for i, length in enumerate(lengths):
                new_len = max_length if length > max_length else length
                chunk = data[start: start + new_len]
                if output is None:
                    output = chunk
                else:

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 139 Column: 53

                      **hu.gcs
    )
    @settings(deadline=10000)
    def test_pack_ops(self, num_seq, cell_size, gc, dc):
        # create data
        lengths = np.arange(num_seq, dtype=np.int32) + 1
        num_cell = np.sum(lengths)
        data = np.zeros(num_cell * cell_size, dtype=np.float32)
        left = np.cumsum(np.arange(num_seq) * cell_size)

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 187 Column: 37

                  @given(
        **hu.gcs_cpu_only
    )
    def test_pack_ops_str(self, gc, dc):
        # GPU does not support string. Test CPU implementation only.
        workspace.FeedBlob('l', np.array([1, 2, 3], dtype=np.int64))
        strs = np.array([
            ["a", "a"],
            ["b", "b"],

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 273 Column: 38

                      assert(result[0, -1, 0] == 0)

    @given(**hu.gcs)
    def test_presence_mask(self, gc, dc):
        lengths = np.array([1, 2, 3], dtype=np.int32)
        data = np.array(
            [
                [1.0, 1.0], [2.0, 2.0], [2.0, 2.0], [3.0, 3.0], [3.0, 3.0],
                [3.0, 3.0]

            

Reported by Pylint.

Using deprecated method assertEquals()
Error

Line: 303 Column: 9

              
        output = workspace.FetchBlob('t')
        expected_output_shape = (3, 3, 2)
        self.assertEquals(output.shape, expected_output_shape)

        presence_mask = workspace.FetchBlob('p')
        expected_presence_mask = np.array(
            [[True, False, False], [True, True, False], [True, True, True]],
            dtype=np.bool

            

Reported by Pylint.

Using deprecated method assertEquals()
Error

Line: 326 Column: 9

              
        output = workspace.FetchBlob('p')
        expected_output_shape = (0, 0)
        self.assertEquals(output.shape, expected_output_shape)

    @given(**hu.gcs_cpu_only)
    @settings(deadline=10000)
    def test_out_of_bounds(self, gc, dc):
        # Copy pasted from test_pack_ops but with 3 changed to 4

            

Reported by Pylint.
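
Both findings have the same one-line remedy: assertEquals is a deprecated alias, so use assertEqual. A self-contained illustration:

import unittest


class ShapeCheck(unittest.TestCase):
    def test_shape(self):
        expected_output_shape = (3, 3, 2)
        # assertEqual is the supported spelling; assertEquals is deprecated.
        self.assertEqual((3, 3, 2), expected_output_shape)


if __name__ == "__main__":
    unittest.main()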

torch/quantization/_numeric_suite.py
62 issues
Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              from torch.quantization import prepare
from typing import Dict, List, Optional, Any, Union, Callable, Set

from .quantization_mappings import (
    get_default_compare_output_module_list,
)

NON_LEAF_MODULE_TO_ADD_OBSERVER_ALLOW_LIST = {
    nnqd.Linear,

            

Reported by Pylint.

Module 'torch' has no 'quint8' member
Error

Line: 171 Column: 22

                      # We only insert observer if the op is quantized with static quantization,
        # which is identified by activation_observer.dtype == quint8.  This is needed
        # when attaching Logger as observer for FX mode
        self.dtype = torch.quint8

    def forward(self, x):
        pass



            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 75 Column: 5

                      a dictionary with two keys 'float' and 'quantized', containing the float and
        quantized weights
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.compare_weights")
    weight_dict: Dict[str, Dict] = {}
    for key in quantized_dict:
        match_key = _find_match(float_dict, key, "weight")
        if match_key is not None:
            weight_dict[key] = {}

            

Reported by Pylint.

Access to a protected member _log_api_usage_once of a client class
Error

Line: 75 Column: 5

                      a dictionary with two keys 'float' and 'quantized', containing the float and
        quantized weights
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.compare_weights")
    weight_dict: Dict[str, Dict] = {}
    for key in quantized_dict:
        match_key = _find_match(float_dict, key, "weight")
        if match_key is not None:
            weight_dict[key] = {}

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 154 Column: 5

                  Return:
        target_dict: the dictionary used to save all logger stats
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.get_logger_dict")

    target_dict: Dict[str, Dict] = {}
    _get_logger_dict_helper(mod, target_dict, prefix)
    return target_dict


            

Reported by Pylint.

Access to a protected member _log_api_usage_once of a client class
Error

Line: 154 Column: 5

                  Return:
        target_dict: the dictionary used to save all logger stats
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.get_logger_dict")

    target_dict: Dict[str, Dict] = {}
    _get_logger_dict_helper(mod, target_dict, prefix)
    return target_dict


            

Reported by Pylint.

Parameters differ from overridden 'forward' method
Error

Line: 187 Column: 5

                      self.stats["float"] = []
        self.stats["quantized"] = []

    def forward(self, x, y):
        if len(x) > 1:
            x = x[0]
        if len(y) > 1:
            y = y[0]
        self.stats["quantized"].append(x.detach())

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 316 Column: 5

                      logger_cls: type of logger to be used in shadow module to process the outputs of
            quantized module and its float shadow module
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.prepare_model_with_stubs")

    float_module_children = {}
    for name, mod in float_module.named_children():
        float_module_children[name] = mod


            

Reported by Pylint.

Access to a protected member _log_api_usage_once of a client class
Error

Line: 316 Column: 5

                      logger_cls: type of logger to be used in shadow module to process the outputs of
            quantized module and its float shadow module
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.prepare_model_with_stubs")

    float_module_children = {}
    for name, mod in float_module.named_children():
        float_module_children[name] = mod


            

Reported by Pylint.

Access to a protected member _modules of a client class
Error

Line: 339 Column: 9

                          reassign[name] = Shadow(mod, float_mod, logger_cls)

    for key, value in reassign.items():
        q_module._modules[key] = value

def _is_identical_module_type(mod1, mod2):
    # Compare if two modules have the same dtype
    mod1_module_types = [type(mod) for mod in mod1.modules()]
    mod2_module_types = [type(mod) for mod in mod2.modules()]

            

Reported by Pylint.

benchmarks/tensorexpr/reduction.py
62 issues
Attempted relative import beyond top-level package
Error

Line: 1 Column: 1

              from . import benchmark


class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N

            

Reported by Pylint.

Bad first argument 'DynamicReduce2DInnerBench' given to super()
Error

Line: 270 Column: 25

                      return [parent_config[1:]]

    def config(self):
        parent_config = super(DynamicReduce2DInnerBench, self).config()
        return parent_config[1:]

    @staticmethod
    def module():
        return "reduce2d_dynamic_outer"

            

Reported by Pylint.
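
The method names DynamicReduce2DInnerBench in super() even though it lives in a different class, which is what Pylint flags. Zero-argument super() avoids the stale class name entirely; a sketch with placeholder class names:

class _BenchBase:  # stand-in for the real benchmark base class
    def config(self):
        return ["mode", 128, 256]


class Reduce2DOuterBenchSketch(_BenchBase):
    def config(self):
        # Zero-argument super() always refers to the enclosing class, so a
        # copy-pasted class name cannot go stale.
        return super().config()[1:]


print(Reduce2DOuterBenchSketch().config())  # [128, 256]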

Missing module docstring
Error

Line: 1 Column: 1

              from . import benchmark


class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N

            

Reported by Pylint.

Missing class docstring
Error

Line: 4 Column: 1

              from . import benchmark


class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N

            

Reported by Pylint.

Argument name "N" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Argument name "K" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Too many arguments (9/5)
Error

Line: 5 Column: 5

              

class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Argument name "M" doesn't conform to snake_case naming style
Error

Line: 5 Column: 5

              

class ReduceBench(benchmark.Benchmark):
    def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K

            

Reported by Pylint.

Attribute name "M" doesn't conform to snake_case naming style
Error

Line: 8 Column: 9

                  def __init__(self, mode, device, dtype, case, M, N, K, skip_input_transform):
        super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K
        self._set_skip_input_transform(skip_input_transform)

        self.inputs = [self.randn(

            

Reported by Pylint.

Attribute name "N" doesn't conform to snake_case naming style
Error

Line: 9 Column: 9

                      super().__init__(mode, device, dtype)
        self.case = case
        self.M = M
        self.N = N
        self.K = K
        self._set_skip_input_transform(skip_input_transform)

        self.inputs = [self.randn(
            [M, N, K], device=device, dtype=dtype, requires_grad=self.requires_grad

            

Reported by Pylint.
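
For the missing-docstring and naming-style findings in this file, the usual choices are adding short docstrings and either renaming to snake_case or, where single-letter tensor dimensions (M, N, K) are the established convention, scoping a targeted disable. A sketch with an illustrative class name:

"""Benchmarks for reduction kernels (illustrative module docstring)."""
# Single-letter tensor dimensions are conventional here, so a file-scoped
# disable can be preferable to renaming M/N/K everywhere.
# pylint: disable=invalid-name,too-many-arguments


class ReduceBenchSketch:
    """Holds the shape of an M x N x K reduction benchmark."""

    def __init__(self, mode, device, dtype, case, M, N, K):
        self.mode, self.device, self.dtype, self.case = mode, device, dtype, case
        self.M = M
        self.N = N
        self.K = K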

tools/codegen/gen_backend_stubs.py
62 issues
Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load().
Security cryptography

Line: 35
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b506_yaml_load.html

                  }

    with open(backend_yaml_path, 'r') as f:
        yaml_values = yaml.load(f, Loader=YamlLoader)
    assert isinstance(yaml_values, dict)

    valid_keys = ['backend', 'cpp_namespace', 'extra_headers', 'supported', 'autograd']

    backend = yaml_values.pop('backend', None)

            

Reported by Bandit.
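
Assuming the YAML file uses only standard tags, Bandit's suggestion applies directly; a self-contained sketch (the path is illustrative):

import yaml

backend_yaml_path = "backend.yaml"  # illustrative path

with open(backend_yaml_path, "r") as f:
    # safe_load only constructs standard YAML types, so the input file cannot
    # instantiate arbitrary Python objects.
    yaml_values = yaml.safe_load(f)
assert isinstance(yaml_values, dict)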

TODO: allow structured external backends later.
Error

Line: 65 Column: 3

                          assert op_name in native_functions_map, f"Found an invalid operator name: {op_name}"
            # See Note [External Backends Follow Dispatcher API]
            kernel_name = dispatcher.name(native_functions_map[op_name].func)
            # TODO: allow structured external backends later.
            m = BackendMetadata(kernel=kernel_name, structured=False)
            metadata[op_name] = m
        # TODO: currently hardcoding the fact that XLA implements out/inplace in terms of functional ops,
        # this should eventually be toggleable per-backend.
        return BackendIndex(

            

Reported by Pylint.

TODO: currently hardcoding the fact that XLA implements out/inplace in terms of functional ops,
Error

Line: 68 Column: 3

                          # TODO: allow structured external backends later.
            m = BackendMetadata(kernel=kernel_name, structured=False)
            metadata[op_name] = m
        # TODO: currently hardcoding the fact that XLA implements out/inplace in terms of functional ops,
        # this should eventually be toggleable per-backend.
        return BackendIndex(
            dispatch_key=dispatch_key,
            use_out_as_primary=False,
            external=True,

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 129 Column: 9

                      with open(kernel_defn_file_path, 'r') as f:
            backend_defns = f.read()
    except IOError:
        raise AssertionError(f'Unable to read from the specified impl_path file: {kernel_defn_file_path}')

    class_name: Optional[str] = backend_indices[backend_key].native_function_class_name()
    assert class_name is not None

    expected_backend_op_names: List[OperatorName] = \

            

Reported by Pylint.
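
The fix Pylint suggests is to chain the exception so the original IOError stays in the traceback; a sketch against the snippet above:

try:
    with open(kernel_defn_file_path, 'r') as f:
        backend_defns = f.read()
except IOError as e:
    # 'from e' preserves the root cause instead of discarding it.
    raise AssertionError(
        f'Unable to read from the specified impl_path file: {kernel_defn_file_path}'
    ) from e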

TODO: handle cases when yaml contains zero ops properly in a later PR.
Error

Line: 201 Column: 3

                  selector = SelectiveBuilder.get_nop_selector()


    # TODO: handle cases when yaml contains zero ops properly in a later PR.
    if backend_key is not None and autograd_key is not None:
        backend_dispatch_key: DispatchKey = backend_key
        autograd_dispatch_key: DispatchKey = autograd_key
        class_name = backend_indices[backend_dispatch_key].native_function_class_name()


            

Reported by Pylint.

Cell variable dispatch_key defined in loop
Error

Line: 233 Column: 32

                              'legacy_th_headers': '',
                'external_backend_headers': f'#include "{output_dir}/{backend_key}NativeFunctions.h"',
                'namespaced_headers': '',
                'DispatchKey': dispatch_key,
                'dispatch_namespace': dispatch_key.lower(),
                'dispatch_namespaced_definitions': list(concatMap(
                    dest.RegisterDispatchKey(
                        backend_indices[dispatch_key],
                        Target.NAMESPACED_DEFINITION,

            

Reported by Pylint.

Cell variable dispatch_key defined in loop
Error

Line: 234 Column: 39

                              'external_backend_headers': f'#include "{output_dir}/{backend_key}NativeFunctions.h"',
                'namespaced_headers': '',
                'DispatchKey': dispatch_key,
                'dispatch_namespace': dispatch_key.lower(),
                'dispatch_namespaced_definitions': list(concatMap(
                    dest.RegisterDispatchKey(
                        backend_indices[dispatch_key],
                        Target.NAMESPACED_DEFINITION,
                        selector,

            

Reported by Pylint.

Cell variable dispatch_key defined in loop
Error

Line: 237 Column: 41

                              'dispatch_namespace': dispatch_key.lower(),
                'dispatch_namespaced_definitions': list(concatMap(
                    dest.RegisterDispatchKey(
                        backend_indices[dispatch_key],
                        Target.NAMESPACED_DEFINITION,
                        selector,
                        rocm=False,
                        cpp_namespace=cpp_namespace,
                        class_method_name=f'{backend_dispatch_key}NativeFunctions'),

            

Reported by Pylint.

Cell variable dispatch_key defined in loop
Error

Line: 247 Column: 41

                              )),
                'dispatch_anonymous_definitions': list(concatMap(
                    dest.RegisterDispatchKey(
                        backend_indices[dispatch_key],
                        Target.ANONYMOUS_DEFINITION,
                        selector,
                        rocm=False,
                        cpp_namespace=cpp_namespace,
                        class_method_name=f'{backend_dispatch_key}NativeFunctions'),

            

Reported by Pylint.

Cell variable dispatch_key defined in loop
Error

Line: 257 Column: 41

                              )),
                'dispatch_registrations': list(concatMap(
                    dest.RegisterDispatchKey(
                        backend_indices[dispatch_key],
                        Target.REGISTRATION,
                        selector,
                        rocm=False,
                        cpp_namespace=cpp_namespace,
                        class_method_name=f'{backend_dispatch_key}NativeFunctions'),

            

Reported by Pylint.
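
The repeated cell-variable findings above come from callables created inside a loop that capture dispatch_key by reference. A common remedy, sketched generically here with made-up keys, is to bind the current value explicitly (via a default argument or functools.partial):

import functools


def make_namespace_fn(dispatch_key):
    # The key is bound as an argument, so each callable keeps the value it was
    # created with rather than the loop variable's final value.
    return functools.partial(str.lower, dispatch_key)


namespace_fns = [make_namespace_fn(k) for k in ("CPU", "CUDA", "XLA")]
print([fn() for fn in namespace_fns])  # ['cpu', 'cuda', 'xla']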

test/jit/test_export_modes.py
61 issues
Unable to import 'torch'
Error

Line: 7 Column: 1

              import sys
import tempfile

import torch
import torch.nn as nn
from torch.onnx import OperatorExportTypes
from torch.autograd import Variable

# Make the helper files in test/ importable

            

Reported by Pylint.

Unable to import 'torch.nn'
Error

Line: 8 Column: 1

              import tempfile

import torch
import torch.nn as nn
from torch.onnx import OperatorExportTypes
from torch.autograd import Variable

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

            

Reported by Pylint.

Unable to import 'torch.onnx'
Error

Line: 9 Column: 1

              
import torch
import torch.nn as nn
from torch.onnx import OperatorExportTypes
from torch.autograd import Variable

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

            

Reported by Pylint.

Unable to import 'torch.autograd'
Error

Line: 10 Column: 1

              import torch
import torch.nn as nn
from torch.onnx import OperatorExportTypes
from torch.autograd import Variable

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 15 Column: 1

              # Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.testing._internal.common_utils import skipIfNoLapack

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 16 Column: 1

              pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.testing._internal.common_utils import skipIfNoLapack

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 26 Column: 9

              # Smoke tests for export methods
class TestExportModes(JitTestCase):
    class MyModel(nn.Module):
        def __init__(self):
            super(TestExportModes.MyModel, self).__init__()

        def forward(self, x):
            return x.transpose(0, 1)


            

Reported by Pylint.
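
The __init__ only calls the parent constructor, so it can simply be removed; a sketch assuming the same nn.Module pattern:

import torch.nn as nn


class MyModel(nn.Module):
    # No explicit __init__ is needed: nn.Module's constructor is inherited,
    # which removes the useless-super-delegation warning.
    def forward(self, x):
        return x.transpose(0, 1)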

Access to a protected member _export of a client class
Error

Line: 36 Column: 9

                      torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)
        f = io.BytesIO()
        torch.onnx._export(torch_model, (fake_input), f, verbose=False,
                           export_type=torch.onnx.ExportTypes.PROTOBUF_FILE)

    def test_zipfile(self):
        torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)

            

Reported by Pylint.

Access to a protected member _export of a client class
Error

Line: 43 Column: 9

                      torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)
        f = io.BytesIO()
        torch.onnx._export(torch_model, (fake_input), f, verbose=False,
                           export_type=torch.onnx.ExportTypes.ZIP_ARCHIVE)

    def test_compressed_zipfile(self):
        torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)

            

Reported by Pylint.

Access to a protected member _export of a client class
Error

Line: 50 Column: 9

                      torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)
        f = io.BytesIO()
        torch.onnx._export(torch_model, (fake_input), f, verbose=False,
                           export_type=torch.onnx.ExportTypes.COMPRESSED_ZIP_ARCHIVE)

    def test_directory(self):
        torch_model = TestExportModes.MyModel()
        fake_input = Variable(torch.randn(1, 1, 224, 224), requires_grad=True)

            

Reported by Pylint.

caffe2/contrib/fakelowp/test/test_sls_8bit_nnpi_fp16.py
61 issues
Unable to import 'caffe2.python.fakelowp.init_shared_libs'
Error

Line: 5 Column: 1

              from typing import Dict, Any

# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 8 Column: 1

              import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 9 Column: 1

              import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial

            

Reported by Pylint.

Unable to import 'caffe2.proto'
Error

Line: 10 Column: 1

              import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial


            

Reported by Pylint.

Unable to import 'caffe2.python'
Error

Line: 11 Column: 1

              from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial

workspace.GlobalInit(

            

Reported by Pylint.

Unable to import 'caffe2.python.onnx.onnxifi'
Error

Line: 12 Column: 1

              from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial

workspace.GlobalInit(
    [

            

Reported by Pylint.

Unable to import 'caffe2.python.fakelowp.test_utils'
Error

Line: 13 Column: 1

              from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial

workspace.GlobalInit(
    [
        "caffe2",

            

Reported by Pylint.

Unable to import 'caffe2.python.serialized_test.serialized_test_util'
Error

Line: 14 Column: 1

              from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial

workspace.GlobalInit(
    [
        "caffe2",
        "--glow_global_fp16=1",

            

Reported by Pylint.

Unused import caffe2.python.fakelowp.init_shared_libs
Error

Line: 5 Column: 1

              from typing import Dict, Any

# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import unittest
from typing import Dict, Any

# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st

            

Reported by Pylint.

torch/utils/data/datapipes/utils/decoder.py
61 issues
Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 170 Column: 28

              
                if etype == "uint8":
                    result = np.array(result.transpose(2, 0, 1))
                    return torch.tensor(result)
                else:
                    result = np.array(result.transpose(2, 0, 1))
                    return torch.tensor(result) / 255.0
            return None


            

Reported by Pylint.

Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 173 Column: 28

                                  return torch.tensor(result)
                else:
                    result = np.array(result.transpose(2, 0, 1))
                    return torch.tensor(result) / 255.0
            return None

def imagehandler(imagespec):
    return ImageHandler(imagespec)


            

Reported by Pylint.
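
torch.tensor and (earlier) torch.quint8 live in the compiled extension, so Pylint cannot see them statically. Projects typically fix this in configuration by allowing Pylint to load the extension (extension-pkg-allow-list=torch in .pylintrc; older releases spell it extension-pkg-whitelist). If a config change is not desired, a local suppression also works; a sketch against the snippet above:

if etype == "uint8":
    result = np.array(result.transpose(2, 0, 1))
    # E1101 is a false positive on the compiled extension; suppress it locally.
    return torch.tensor(result)  # pylint: disable=no-member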

Pickle and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue.
Security blacklist

Line: 35
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle

                      return json.loads(data)

    if extension in "pyd pickle".split():
        return pickle.loads(data)

    if extension in "pt".split():
        stream = io.BytesIO(data)
        return torch.load(stream)


            

Reported by Bandit.

Unused variable 'e'
Error

Line: 138 Column: 9

              
        try:
            import numpy as np
        except ImportError as e:
            raise ModuleNotFoundError("Package `numpy` is required to be installed for default image decoder."
                                      "Please use `pip install numpy` to install the package")

        try:
            import PIL.Image

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 139 Column: 13

                      try:
            import numpy as np
        except ImportError as e:
            raise ModuleNotFoundError("Package `numpy` is required to be installed for default image decoder."
                                      "Please use `pip install numpy` to install the package")

        try:
            import PIL.Image
        except ImportError as e:

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 145 Column: 13

                      try:
            import PIL.Image
        except ImportError as e:
            raise ModuleNotFoundError("Package `PIL` is required to be installed for default image decoder."
                                      "Please use `pip install Pillow` to install the package")

        imagespec = self.imagespec
        atype, etype, mode = imagespecs[imagespec]


            

Reported by Pylint.

Unused variable 'e'
Error

Line: 189 Column: 5

              
    try:
        import torchvision.io
    except ImportError as e:
        raise ModuleNotFoundError("Package `torchvision` is required to be installed for default video file loader."
                                  "Please use `pip install torchvision` or `conda install torchvision -c pytorch`"
                                  "to install the package")

    with tempfile.TemporaryDirectory() as dirname:

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 190 Column: 9

                  try:
        import torchvision.io
    except ImportError as e:
        raise ModuleNotFoundError("Package `torchvision` is required to be installed for default video file loader."
                                  "Please use `pip install torchvision` or `conda install torchvision -c pytorch`"
                                  "to install the package")

    with tempfile.TemporaryDirectory() as dirname:
        fname = os.path.join(dirname, f"file.{extension}")

            

Reported by Pylint.

Unused variable 'e'
Error

Line: 210 Column: 5

              
    try:
        import torchaudio  # type: ignore[import]
    except ImportError as e:
        raise ModuleNotFoundError("Package `torchaudio` is required to be installed for default audio file loader."
                                  "Please use `pip install torchaudio` or `conda install torchaudio -c pytorch`"
                                  "to install the package")

    with tempfile.TemporaryDirectory() as dirname:

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 211 Column: 9

                  try:
        import torchaudio  # type: ignore[import]
    except ImportError as e:
        raise ModuleNotFoundError("Package `torchaudio` is required to be installed for default audio file loader."
                                  "Please use `pip install torchaudio` or `conda install torchaudio -c pytorch`"
                                  "to install the package")

    with tempfile.TemporaryDirectory() as dirname:
        fname = os.path.join(dirname, f"file.{extension}")

            

Reported by Pylint.
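
The unused-variable and re-raise findings in this file all follow the same pattern and share one fix: re-raise with 'from e', which both uses the bound name and preserves the cause. For example:

try:
    import numpy as np
except ImportError as e:
    # 'from e' keeps the original traceback and makes the bound name useful.
    raise ModuleNotFoundError(
        "Package `numpy` is required to be installed for default image decoder. "
        "Please use `pip install numpy` to install the package"
    ) from e

print(np.__version__)  # stand-in for the decoder's real use of numpy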

caffe2/quantization/server/utils.py
61 issues
No name 'fb' in module 'caffe2.python'
Error

Line: 9 Column: 1

              
import numpy as np
from caffe2.python import core, utils
from caffe2.python.fb import hardcode_scale_zp  # type: ignore[import]


logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


            

Reported by Pylint.

Unable to import 'caffe2.python.fb'
Error

Line: 9 Column: 1

              
import numpy as np
from caffe2.python import core, utils
from caffe2.python.fb import hardcode_scale_zp  # type: ignore[import]


logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 49 Column: 30

                      bn = net.op[j]
        if bn.type != "SpatialBN" or (len(uses) > 1 and conv.output[0] != bn.output[0]):
            if bn.type == "SpatialBN":
                logger.debug("Can't fuse if more than one user {}".format(uses))
            # Can't fuse if more than one user unless SpatialBN is inplace
            # An example of inplace SpatialBN where we want to allow multiple uses:
            # x = Conv(...)
            # ... // no interferring use or def of x (will be checked below)
            # x = SpatialBN(x, ...)

            

Reported by Pylint.

Exception arguments suggest string formatting might be intended
Error

Line: 180 Column: 17

                      )
        if begin_op_index is None:
            if any(op.type == "SpatialBN" for op in next_net.op) and not ignore_failure:
                raise Exception(
                    "Model contains SpatialBN op after fusion: %s", next_net
                )
            return (next_net, next_params, removed_tensors)
        net, params, removed_tensors = (next_net, next_params, removed_tensors)


            

Reported by Pylint.

Assigning the same variable 'removed_tensors' to itself
Error

Line: 184 Column: 22

                                  "Model contains SpatialBN op after fusion: %s", next_net
                )
            return (next_net, next_params, removed_tensors)
        net, params, removed_tensors = (next_net, next_params, removed_tensors)


def fuse_first_scale(net, params, removed_tensors):
    net = copy.deepcopy(net)
    params = copy.deepcopy(params)

            

Reported by Pylint.
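
The tuple unpack rebinds removed_tensors to itself; only the names that actually change need to be reassigned. A minimal illustration with placeholder values:

next_net, next_params, removed_tensors = "net_v2", {"w0": 1}, ["dropped_blob"]

# Rebinding only the changed names avoids the pointless self-assignment.
net, params = next_net, next_params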

Unused argument 'ignore_failure'
Error

Line: 229 Column: 29

                  return net, params, removed_tensors


def fuse_scale(net, params, ignore_failure):
    # Run until we hit a fixed point
    removed_tensors = []
    while True:
        (next_net, next_params, removed_tensors) = fuse_first_scale(
            net, params, removed_tensors

            

Reported by Pylint.

Assigning the same variable 'removed_tensors' to itself
Error

Line: 238 Column: 22

                      )
        if len(next_net.op) == len(net.op):
            return (next_net, next_params, removed_tensors)
        net, params, removed_tensors = (next_net, next_params, removed_tensors)


def fuse_first_relu(net, begin_op_index, ignore_op_with_output=None):
    net = copy.deepcopy(net)


            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 261 Column: 30

                      if relu.type != "Relu" or len(uses) > 1 and conv.output[0] != relu.output[0]:
            # Can't fuse if more than one user unless Relu is inplace
            if relu.type == "Relu":
                logger.debug("Can't fuse if more than one user {}".format(uses))
            continue

        # There shouldn't be any def of conv.output[0] and any use or def of relu.output[0] between conv and relu
        if any(
            blob in net.op[k].input or blob in net.op[k].output

            

Reported by Pylint.

Exception arguments suggest string formatting might be intended
Error

Line: 298 Column: 17

                      )
        if begin_op_index is None:
            if any(op.type == "Relu" for op in next_net.op) and not ignore_failure:
                raise Exception("Model contains Relu op after fusion: %s", next_net)
            return next_net
        net = next_net


def last_producer(ops, blob):

            

Reported by Pylint.

Exception arguments suggest string formatting might be intended
Error

Line: 307 Column: 5

                  for (i, op) in reversed(list(enumerate(ops))):
        if op.output[0] == blob:
            return i
    raise ValueError("Failed to find last producer of blob, %s", blob)


def swap_first_concat_relu(net, ignore_op_with_output=None):
    net = copy.deepcopy(net)


            

Reported by Pylint.
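
Passing ("... %s", blob) as separate exception arguments builds an exception whose payload is a tuple rather than a formatted message; interpolate the string explicitly instead. A sketch against the function above:

def last_producer(ops, blob):
    for i, op in reversed(list(enumerate(ops))):
        if op.output[0] == blob:
            return i
    # Format the message up front so the exception carries a readable string.
    raise ValueError(f"Failed to find last producer of blob, {blob}")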