The following issues were found:

scripts/model_zoo/update-models-from-caffe2.py
77 issues
Unable to import 'onnx.backend'
Error

Line: 3 Column: 1

              #! /usr/bin/env python3

import onnx.backend

import argparse
import caffe2.python.workspace as c2_workspace
import glob
import json
import numpy as np

            

Reported by Pylint.
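
Note: this and the many similar "Unable to import" findings throughout this report usually mean Pylint ran in an environment where the project's dependencies (onnx, caffe2, torch, hypothesis, boto3) were not installed, rather than a defect in the code. Installing the dependencies, or listing them under ignored-modules in the Pylint configuration, is the usual fix; a line-level suppression, sketched below, is a last resort.

# Fallback when a dependency genuinely cannot be installed in the lint
# environment; installing onnx (or adding it to ignored-modules in the
# Pylint config) is the better fix.
import onnx.backend  # pylint: disable=import-error

print("onnx.backend imported OK")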

Unable to import 'caffe2.python.workspace'
Error

Line: 6 Column: 1

              import onnx.backend

import argparse
import caffe2.python.workspace as c2_workspace
import glob
import json
import numpy as np
import onnx
import caffe2.python.onnx.frontend

            

Reported by Pylint.

Unable to import 'onnx'
Error

Line: 10 Column: 1

              import glob
import json
import numpy as np
import onnx
import caffe2.python.onnx.frontend
import caffe2.python.onnx.backend
import os
import shutil
import tarfile

            

Reported by Pylint.

Unable to import 'caffe2.python.onnx.frontend'
Error

Line: 11 Column: 1

              import json
import numpy as np
import onnx
import caffe2.python.onnx.frontend
import caffe2.python.onnx.backend
import os
import shutil
import tarfile
import tempfile

            

Reported by Pylint.

Unable to import 'caffe2.python.onnx.backend'
Error

Line: 12 Column: 1

              import numpy as np
import onnx
import caffe2.python.onnx.frontend
import caffe2.python.onnx.backend
import os
import shutil
import tarfile
import tempfile


            

Reported by Pylint.

Unable to import 'boto3'
Error

Line: 18 Column: 1

              import tarfile
import tempfile

import boto3

from six.moves.urllib.request import urlretrieve

from caffe2.python.models.download import downloadFromURLToFile, getURLFromName, deleteDirectory
from caffe2.proto import caffe2_pb2

            

Reported by Pylint.

Unable to import 'caffe2.python.models.download'
Error

Line: 22 Column: 1

              
from six.moves.urllib.request import urlretrieve

from caffe2.python.models.download import downloadFromURLToFile, getURLFromName, deleteDirectory
from caffe2.proto import caffe2_pb2
from onnx import numpy_helper


"""A script converting Caffe2 models to ONNX, and updating ONNX model zoos.

            

Reported by Pylint.

Unable to import 'caffe2.proto'
Error

Line: 23 Column: 1

              from six.moves.urllib.request import urlretrieve

from caffe2.python.models.download import downloadFromURLToFile, getURLFromName, deleteDirectory
from caffe2.proto import caffe2_pb2
from onnx import numpy_helper


"""A script converting Caffe2 models to ONNX, and updating ONNX model zoos.


            

Reported by Pylint.

Unable to import 'onnx'
Error

Line: 24 Column: 1

              
from caffe2.python.models.download import downloadFromURLToFile, getURLFromName, deleteDirectory
from caffe2.proto import caffe2_pb2
from onnx import numpy_helper


"""A script converting Caffe2 models to ONNX, and updating ONNX model zoos.

Arguments:

            

Reported by Pylint.

The raise statement is not inside an except clause
Error

Line: 160 Column: 9

                  elif tensor_type.elem_type == onnx.TensorProto.INT:
        type = np.int32
    else:
        raise
    array = np.random.rand(*shape).astype(type)
    return array


def generate_test_input_data(onnx_model, scale):

            

Reported by Pylint.
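
For reference, a bare `raise` outside an `except` block fails at runtime with "RuntimeError: No active exception to re-raise". A minimal sketch of the usual remedy, with placeholder constants standing in for the onnx.TensorProto values checked above:

import numpy as np

FLOAT, INT = 1, 6  # illustrative stand-ins for the onnx.TensorProto enum values

def random_array(elem_type, shape):
    if elem_type == FLOAT:
        dtype = np.float32   # renamed from `type` to avoid shadowing the builtin
    elif elem_type == INT:
        dtype = np.int32
    else:
        # Raise a concrete exception with context instead of a bare `raise`,
        # which is only legal inside an `except` block.
        raise ValueError("unsupported tensor element type: {}".format(elem_type))
    return np.random.rand(*shape).astype(dtype)

print(random_array(FLOAT, (2, 3)).dtype)  # float32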

test/jit/test_ignore_context_manager.py
77 issues
Unable to import 'torch'
Error

Line: 5 Column: 1

              import sys
import unittest

import torch

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 10 Column: 1

              # Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.jit.frontend import _IS_ASTUNPARSE_INSTALLED

if __name__ == "__main__":
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"

            

Reported by Pylint.

Unable to import 'torch.jit.frontend'
Error

Line: 11 Column: 1

              pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.jit.frontend import _IS_ASTUNPARSE_INSTALLED

if __name__ == "__main__":
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 22 Column: 13

                  @unittest.skipUnless(_IS_ASTUNPARSE_INSTALLED, "astunparse package is required")
    def test_with_ignore_context_manager_with_inp_out(self):
        class A(torch.nn.Module):
            def __init__(self):
                super(A, self).__init__()

            def forward(self):
                a: int = 4
                b: int = 5

            

Reported by Pylint.
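
An `__init__` that only forwards to `super().__init__()` with the same arguments can simply be removed; the inherited constructor already does the same thing. A minimal sketch with a plain base class standing in for torch.nn.Module:

class Module:                      # stand-in for torch.nn.Module
    def __init__(self):
        self.training = True

class A(Module):                   # flagged: this __init__ adds nothing
    def __init__(self):
        super().__init__()

class B(Module):                   # fix: drop __init__ and inherit it
    def forward(self):
        return 4 + 5

assert B().training and B().forward() == 9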

Access to a protected member _IgnoreContextManager of a client class
Error

Line: 30 Column: 22

                              b: int = 5
                c: int = 0
                d: int = 6
                with torch.jit._IgnoreContextManager(a="inp:int", b="inp:int", c="out:int", d="out:int"):
                    l = [2 for i in range(a) if i > 2]
                    c = l[0] + a + b
                    d = 9
                return c + d
        model = A()

            

Reported by Pylint.
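
These tests target torch.jit._IgnoreContextManager on purpose, so the usual response is a line-scoped suppression rather than a rewrite. A sketch of the pattern, using a local stand-in for the internal context manager:

import types

class _IgnoreContextManager:       # stand-in for the internal torch.jit helper
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        return False

jit = types.SimpleNamespace(_IgnoreContextManager=_IgnoreContextManager)

# Suppress protected-access only on the line that intentionally uses the
# internal API; the rest of the file keeps the check.
with jit._IgnoreContextManager():  # pylint: disable=protected-access
    pass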

Useless super delegation in method '__init__'
Error

Line: 41 Column: 13

                      self.assertEqual(s(), 20)

        class B(torch.nn.Module):
            def __init__(self):
                super(B, self).__init__()

            def forward(self):
                a: int = 4
                b: int = 5

            

Reported by Pylint.

Access to a protected member _IgnoreContextManager of a client class
Error

Line: 48 Column: 22

                              a: int = 4
                b: int = 5
                c: int = 0
                with torch.jit._IgnoreContextManager(a="inp:int", b="inp:int", c="out:int"):
                    l = [2 for i in range(a) if i > 2]
                    c = l[0] + a + b
                return c
        model = B()
        s = torch.jit.script(model)

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 58 Column: 13

                      self.assertEqual(s(), model())

        class C(torch.nn.Module):
            def __init__(self):
                super(C, self).__init__()

            def forward(self):
                a: int = 4
                b: int = 5

            

Reported by Pylint.

Access to a protected member _IgnoreContextManager of a client class
Error

Line: 64 Column: 22

                          def forward(self):
                a: int = 4
                b: int = 5
                with torch.jit._IgnoreContextManager(a="inp:int", b="out:int"):
                    l = [2 for i in range(a) if i > 2]
                    b = l[0] + a
                return b
        model = C()
        s = torch.jit.script(model)

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 76 Column: 13

                  @unittest.skipUnless(_IS_ASTUNPARSE_INSTALLED, "astunparse package is required")
    def test_with_ignore_context_manager_with_just_inp(self):
        class A(torch.nn.Module):
            def __init__(self):
                super(A, self).__init__()

            def forward(self):
                a: int = 4
                b: int = 5

            

Reported by Pylint.

caffe2/python/model_helper.py
77 issues
Module 'caffe2.python._import_c_extension' has no 'nearby_opnames' member
Error

Line: 434 Column: 26

                          raise AttributeError(
                'Method ' + op_type + ' is not a registered operator.' +
                ' Did you mean: [' +
                ','.join(workspace.C.nearby_opnames(op_type)) + ']'
            )
        if op_type not in _known_working_ops:
            if not self.allow_not_known_ops:
                raise AttributeError(
                    "Operator {} is not known to be safe".format(op_type))

            

Reported by Pylint.

Access to a protected member _NAMESCOPE_SEPARATOR of a client class
Error

Line: 278 Column: 52

                  def _NormalizeNamescope(namescope):
        if namescope is None:
            return scope.CurrentNameScope()
        elif namescope == '' or namescope.endswith(scope._NAMESCOPE_SEPARATOR):
            return namescope
        else:
            return namescope + scope._NAMESCOPE_SEPARATOR

    def GetParams(self, namescope=None, top_scope=False):

            

Reported by Pylint.

Access to a protected member _NAMESCOPE_SEPARATOR of a client class
Error

Line: 281 Column: 32

                      elif namescope == '' or namescope.endswith(scope._NAMESCOPE_SEPARATOR):
            return namescope
        else:
            return namescope + scope._NAMESCOPE_SEPARATOR

    def GetParams(self, namescope=None, top_scope=False):
        '''
        Returns the params in current namescope
        '''

            

Reported by Pylint.

Unused argument 'top_scope'
Error

Line: 283 Column: 41

                      else:
            return namescope + scope._NAMESCOPE_SEPARATOR

    def GetParams(self, namescope=None, top_scope=False):
        '''
        Returns the params in current namescope
        '''
        namescope = ModelHelper._NormalizeNamescope(namescope)


            

Reported by Pylint.
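
When a parameter has to stay in the signature for API compatibility but the body never reads it, the two common remedies are an underscore prefix or a local suppression. A sketch, with GetParams simplified to a free function:

def get_params(namescope=None, _top_scope=False):
    # The leading underscore documents (and silences) the unused parameter,
    # but changes the keyword name for callers that pass it explicitly.
    return namescope or ""

def get_params_compat(namescope=None, top_scope=False):  # pylint: disable=unused-argument
    # Keeps the public keyword name and suppresses the warning locally instead.
    return namescope or ""

print(get_params("model/"), get_params_compat("model/"))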

Attribute 'grad_map' defined outside __init__
Error

Line: 316 Column: 9

                      self.Validate()

        self.gradient_ops_added = True
        self.grad_map = self.net.AddGradientOperators(*args, **kwargs)
        self.param_to_grad = self.get_param_to_grad(self.params)

        # Populate ParameterInfo for all parameters if missing
        # and add gradient blob information. So optimizers can use it
        for param, grad in self.param_to_grad.items():

            

Reported by Pylint.
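
Pylint wants every instance attribute visible in `__init__`; assigning a sentinel there and filling the real value later keeps the class's state discoverable. A minimal sketch of the pattern:

class ModelHelperSketch:
    def __init__(self):
        self.gradient_ops_added = False
        self.grad_map = None            # declared here, populated later

    def add_gradient_operators(self, grads):
        self.gradient_ops_added = True
        self.grad_map = dict(grads)     # no longer the attribute's first assignment
        return self.grad_map

helper = ModelHelperSketch()
print(helper.add_gradient_operators({"w": "w_grad"}))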

Use lazy % formatting in logging functions
Error

Line: 441 Column: 29

                              raise AttributeError(
                    "Operator {} is not known to be safe".format(op_type))

            logging.warning("You are creating an op that the ModelHelper "
                            "does not recognize: {}.".format(op_type))
        return self.net.__getattr__(op_type)

    def __dir__(self):
        return sorted(set(chain(

            

Reported by Pylint.
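
The fix Pylint asks for is to pass the values to the logging call and let it interpolate only when the record is actually emitted; eager `.format()` pays the formatting cost even for messages that are filtered out. A sketch:

import logging

op_type = "SomeCustomOp"

# Flagged: the string is formatted before logging decides whether to emit it.
logging.warning("You are creating an op that the ModelHelper "
                "does not recognize: {}.".format(op_type))

# Preferred: lazy %-style interpolation performed by the logging machinery.
logging.warning("You are creating an op that the ModelHelper "
                "does not recognize: %s.", op_type)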

Consider explicitly re-raising using the 'from' keyword
Error

Line: 544 Column: 9

                          ]
        )
    except ValueError:
        raise Exception("No ops with input={}".format(input_blobs))
    try:
        last_op_with_output = max(
            [
                j for j in range(len(ops))
                if output_blobs.intersection(ops[j].output)

            

Reported by Pylint.
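
Adding `from` to the re-raise chains the new exception to the original `ValueError`, so the traceback records why the lookup failed. A minimal sketch with the op structures simplified to lists of input names:

def first_op_with_input(ops, input_blobs):
    try:
        return min(j for j, op_inputs in enumerate(ops)
                   if input_blobs.intersection(op_inputs))
    except ValueError as exc:
        # Explicit chaining keeps the original cause in the traceback.
        raise RuntimeError("No ops with input={}".format(input_blobs)) from exc

try:
    first_op_with_input([], {"data"})
except RuntimeError as err:
    print(err, "| caused by:", repr(err.__cause__))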

Consider explicitly re-raising using the 'from' keyword
Error

Line: 553 Column: 9

                          ]
        )
    except ValueError:
        raise Exception("No ops with output={}".format(output_blobs))

    def validate_op(op):
        # Check that the op does not have is_test = 0 set. This is a common
        # pitfall with SpatialBN op, at lest.
        for arg in op.arg:

            

Reported by Pylint.

TODO: when standard argument type for "nets" is introduced,
Error

Line: 582 Column: 3

                      if known_blobs.issuperset(op.input):

            # Special handling for recurrent nets
            # TODO: when standard argument type for "nets" is introduced,
            # this can be more general
            if op.type == 'RecurrentNetwork':
                for arg in op.arg:
                    if arg.name == 'backward_step_net':
                        arg.ClearField(str('n'))

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 621 Column: 17

              
        else:
            logging.debug(
                "Op {} had unknown inputs: {}".format(
                    op.type, set(op.input).difference(known_blobs)
                )
            )

    # Predictor net's external inputs and outputs include only those

            

Reported by Pylint.

caffe2/contrib/fakelowp/test/test_op_nnpi_fp16.py
76 issues
Unable to import 'caffe2.python.fakelowp.init_shared_libs'
Error

Line: 3 Column: 1

              import numpy as np

import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 5 Column: 1

              
import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 6 Column: 1

              import caffe2.python.fakelowp.init_shared_libs  # noqa
import datetime
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info

            

Reported by Pylint.

Unable to import 'caffe2.proto'
Error

Line: 7 Column: 1

              import datetime
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error

            

Reported by Pylint.

Unable to import 'caffe2.python'
Error

Line: 8 Column: 1

              from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial

            

Reported by Pylint.

Unable to import 'caffe2.python'
Error

Line: 9 Column: 1

              from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial


            

Reported by Pylint.

Unable to import 'caffe2.python.onnx.onnxifi'
Error

Line: 10 Column: 1

              from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])

            

Reported by Pylint.

Unable to import 'caffe2.python.fakelowp.test_utils'
Error

Line: 11 Column: 1

              from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])


            

Reported by Pylint.

Unable to import 'caffe2.python.fakelowp.test_utils'
Error

Line: 12 Column: 1

              from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])

kEpsilon = 1e-8

            

Reported by Pylint.

Unable to import 'caffe2.python.serialized_test.serialized_test_util'
Error

Line: 13 Column: 1

              from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
from caffe2.python.fakelowp.test_utils import compute_ulp_error
import caffe2.python.serialized_test.serialized_test_util as serial

core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])

kEpsilon = 1e-8


            

Reported by Pylint.

caffe2/contrib/aten/gen_op.py
76 issues
Redefining name 'o' from outer scope (line 246)
Error

Line: 103 Column: 12

                  'index': 'internal::index_with_uint8_handling',
}

def expand(o):
    num_defaults = sum(1 if 'default' in arg else 0 for arg in o['arguments'])
    results = [o]
    for i in range(0, num_defaults):
        # last num_default values should be default
        assert('default' in o['arguments'][-(i + 1)])

            

Reported by Pylint.
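
These shadowing warnings come from module-level driver code near the bottom of gen_op.py reusing the same short names (`o`, `i`, `arg`, `factory_methods`) that the helper functions take as parameters. One sketch of the usual fix is to move the driver loop into a function of its own so its locals cannot collide:

def expand(decl):
    """Helper keeps its short parameter name without shadowing anything."""
    return [decl]

def main(declarations):
    # Driver logic that previously ran at module scope; its loop variables are
    # now locals and no longer clash with the helpers' parameter names.
    expanded = []
    for decl in declarations:
        expanded.extend(expand(decl))
    return expanded

if __name__ == "__main__":
    print(main([{"name": "add"}, {"name": "mul"}]))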

Redefining name 'i' from outer scope (line 286)
Error

Line: 106 Column: 9

              def expand(o):
    num_defaults = sum(1 if 'default' in arg else 0 for arg in o['arguments'])
    results = [o]
    for i in range(0, num_defaults):
        # last num_default values should be default
        assert('default' in o['arguments'][-(i + 1)])
        v = deepcopy(o)
        v['arguments'] = v['arguments'][:-(i + 1)]
        results.append(v)

            

Reported by Pylint.

Redefining name 'o' from outer scope (line 246)
Error

Line: 116 Column: 14

              

# filter the list of declarations removing things we cannot support
def supports(o, factory_methods):
    # Ignore all families (!) of functions that have TensorOptions (i.e. tensor factory methods).
    if o['name'] in factory_methods:
        if factory_methods[o['name']] == 0:
            print("Skipping {} because it is a factory method".format(o['name']))
        factory_methods[o['name']] += 1

            

Reported by Pylint.

Redefining name 'factory_methods' from outer scope (line 237)
Error

Line: 116 Column: 17

              

# filter the list of declarations removing things we cannot support
def supports(o, factory_methods):
    # Ignore all families (!) of functions that have TensorOptions (i.e. tensor factory methods).
    if o['name'] in factory_methods:
        if factory_methods[o['name']] == 0:
            print("Skipping {} because it is a factory method".format(o['name']))
        factory_methods[o['name']] += 1

            

Reported by Pylint.

Redefining name 'arg' from outer scope (line 286)
Error

Line: 146 Column: 9

                          return False

    # skip arguments we cannot handle
    for arg in o['arguments']:
        if not value_has_tensors(arg) and arg['type'] not in ARGUMENT_MAP:
            print("Skipping {} Because of Arg: {} ({}) ".format(
                  o['name'], arg['type'], arg['dynamic_type']))
            return False
    return True

            

Reported by Pylint.

Redefining name 'i' from outer scope (line 286)
Error

Line: 188 Column: 19

              """)


def get_output(o, i):
    if len(o['returns']) == 1:
        return 'the_result'
    else:
        return '::std::get<{}>(the_result)'.format(i)


            

Reported by Pylint.

Redefining name 'o' from outer scope (line 246)
Error

Line: 188 Column: 16

              """)


def get_output(o, i):
    if len(o['returns']) == 1:
        return 'the_result'
    else:
        return '::std::get<{}>(the_result)'.format(i)


            

Reported by Pylint.

Redefining name 'o' from outer scope (line 246)
Error

Line: 195 Column: 21

                      return '::std::get<{}>(the_result)'.format(i)


def attribute_names(o):
    return sorted([a['name'] for a in o['arguments'] if not value_has_tensors(a)])


def required_attribute_names(o):
    return sorted([a['name'] for a in o['arguments'] if not value_has_tensors(a) and 'default' not in a])

            

Reported by Pylint.

Redefining name 'o' from outer scope (line 246)
Error

Line: 199 Column: 30

                  return sorted([a['name'] for a in o['arguments'] if not value_has_tensors(a)])


def required_attribute_names(o):
    return sorted([a['name'] for a in o['arguments'] if not value_has_tensors(a) and 'default' not in a])


def self_as_first_argument(arguments):
    return ([a for a in arguments if a['name'] == 'self'] +

            

Reported by Pylint.

Redefining name 'o' from outer scope (line 246)
Error

Line: 208 Column: 20

                          [a for a in arguments if a['name'] != 'self'])


def get_num_inputs(o):
    args = 0
    for a in o['arguments']:
        if a['type'] in ['at::TensorList', 'const c10::List<c10::optional<at::Tensor>> &']:
            return '*'
        elif value_has_tensors(a):

            

Reported by Pylint.

torch/distributed/_sharded_tensor/api.py
76 issues
Module 'torch' has no 'Size' member
Error

Line: 70 Column: 11

                  shards_metadata: List[ShardMetadata] = field(default_factory=list)

    # Size of each dim of the overall Tensor.
    size: torch.Size = field(default=torch.Size([]))

    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False

            

Reported by Pylint.
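
torch.Size, torch.strided, torch.contiguous_format and the other flagged members are created by torch's C extension at import time, so Pylint's static view of the module cannot see them. A sketch of the per-file workaround (project-wide, listing the torch members under `generated-members` in the Pylint configuration is the tidier fix):

# pylint: disable=no-member   # torch members come from the C extension

import torch

size = torch.Size([2, 3])            # all of these resolve fine at runtime
layout = torch.strided
fmt = torch.contiguous_format
print(size, layout, fmt)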

Module 'torch' has no 'Size' member
Error

Line: 70 Column: 38

                  shards_metadata: List[ShardMetadata] = field(default_factory=list)

    # Size of each dim of the overall Tensor.
    size: torch.Size = field(default=torch.Size([]))

    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False

            

Reported by Pylint.

Module 'torch' has no 'get_default_dtype' member; maybe 'set_default_dtype'?
Error

Line: 73 Column: 40

                  size: torch.Size = field(default=torch.Size([]))

    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False


            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 73 Column: 12

                  size: torch.Size = field(default=torch.Size([]))

    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False


            

Reported by Pylint.

Module 'torch' has no 'layout' member
Error

Line: 74 Column: 13

              
    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False

    def __getstate__(self):

            

Reported by Pylint.

Module 'torch' has no 'strided' member
Error

Line: 74 Column: 42

              
    # Regular tensor fields
    dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False

    def __getstate__(self):

            

Reported by Pylint.

Module 'torch' has no 'contiguous_format' member
Error

Line: 76 Column: 56

                  dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False

    def __getstate__(self):
        # Since torch.memory_format cannot be pickled!
        if self.memory_format == torch.contiguous_format:

            

Reported by Pylint.

Module 'torch' has no 'memory_format' member
Error

Line: 76 Column: 20

                  dtype: torch.dtype = field(default=torch.get_default_dtype())
    layout: torch.layout = field(default=torch.strided)
    requires_grad: bool = False
    memory_format: torch.memory_format = field(default=torch.contiguous_format)
    pin_memory: bool = False

    def __getstate__(self):
        # Since torch.memory_format cannot be pickled!
        if self.memory_format == torch.contiguous_format:

            

Reported by Pylint.

Module 'torch' has no 'contiguous_format' member
Error

Line: 81 Column: 34

              
    def __getstate__(self):
        # Since torch.memory_format cannot be pickled!
        if self.memory_format == torch.contiguous_format:
            mem_format_encoding = 0
        elif self.memory_format == torch.channels_last:
            mem_format_encoding = 1
        elif self.memory_format == torch.preserve_format:
            mem_format_encoding = 1

            

Reported by Pylint.

Module 'torch' has no 'channels_last' member
Error

Line: 83 Column: 36

                      # Since torch.memory_format cannot be pickled!
        if self.memory_format == torch.contiguous_format:
            mem_format_encoding = 0
        elif self.memory_format == torch.channels_last:
            mem_format_encoding = 1
        elif self.memory_format == torch.preserve_format:
            mem_format_encoding = 1
        else:
            raise RuntimeError(f'Invalid torch.memory_format: {self.memory_format}')

            

Reported by Pylint.

caffe2/python/operator_test/cross_entropy_ops_test.py
76 issues
Unable to import 'hypothesis'
Error

Line: 6 Column: 1

              

from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np

import unittest

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 8 Column: 1

              from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np

import unittest

def sigmoid(x):

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 70 Column: 36

                      **hu.gcs
    )
    def test_sigmoid_cross_entropy_with_logits(
        self, inputs, options, gc, dc
    ):
        logits, targets = inputs
        log_D_trick, unjoined_lr_loss = options

        def sigmoid_xentr_logit_ref(logits, targets):

            

Reported by Pylint.

Unused argument 'outputs'
Error

Line: 89 Column: 49

                          m = np.mean(s, axis=len(logits.shape) - 1)
            return (m, )

        def sigmoid_xentr_logit_grad_ref(g_out, outputs, fwd_inputs):
            fwd_logits, fwd_targets = fwd_inputs
            inner_size = fwd_logits.shape[-1]
            if unjoined_lr_loss:
                m = unjoined_sigmoid_cross_entropy_grad(logits, targets)
            else:

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 125 Column: 32

                      **hu.gcs_cpu_only
    )
    def test_cross_entropy_and_unjoied_cross_entropy_relation(
        self, log_D_trick, gc, dc
    ):
        logits = np.array([1.4720, 0.3500, -0.6529, -1.1908, 0.8357,
                    -1.0774, -0.3395, -0.2469, 0.6708, -1.8332], dtype='f')
        targets = np.array([1., 1., 1., 1., 1., 1., 0., 0., 0., 0.], dtype='f')
        lr_size = targets.size

            

Reported by Pylint.

Unused argument 'outputs'
Error

Line: 141 Column: 49

                          m = np.mean(s, axis=len(logits.shape) - 1)
            return (m, )

        def sigmoid_xentr_logit_grad_ref(g_out, outputs, fwd_inputs):
            fwd_logits, fwd_targets = fwd_inputs
            inner_size = fwd_logits.shape[-1]
            if unjoined_lr_loss:
                m = unjoined_sigmoid_cross_entropy_grad(logits, targets)
            else:

            

Reported by Pylint.

Unused argument 'dc'
Error

Line: 221 Column: 75

                      ),
        **hu.gcs
    )
    def test_weighted_sigmoid_cross_entropy_with_logits(self, inputs, gc, dc):
        logits, targets, weights = inputs

        def weighted_sigmoid_xentr_logit_ref(logits, targets, weights):
            s = sigmoid_cross_entropy_with_logits(logits, targets)
            s = np.multiply(s, weights)

            

Reported by Pylint.

Unused argument 'outputs'
Error

Line: 230 Column: 58

                          m = np.mean(s, axis=len(logits.shape) - 1)
            return (m, )

        def weighted_sigmoid_xentr_logit_grad_ref(g_out, outputs, fwd_inputs):
            fwd_logits, fwd_targets, fwd_weights = fwd_inputs
            inner_size = fwd_logits.shape[-1]
            m = fwd_targets - sigmoid(fwd_logits)
            m = np.multiply(m, weights)
            g_in = -np.expand_dims(g_out, axis=-1) * m / inner_size

            

Reported by Pylint.

Unused variable 'fwd_weights'
Error

Line: 231 Column: 38

                          return (m, )

        def weighted_sigmoid_xentr_logit_grad_ref(g_out, outputs, fwd_inputs):
            fwd_logits, fwd_targets, fwd_weights = fwd_inputs
            inner_size = fwd_logits.shape[-1]
            m = fwd_targets - sigmoid(fwd_logits)
            m = np.multiply(m, weights)
            g_in = -np.expand_dims(g_out, axis=-1) * m / inner_size
            return (g_in, None, None)

            

Reported by Pylint.
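
When tuple unpacking has to match the shape of `fwd_inputs` but one element is never read, the conventional spelling is an underscore placeholder. A sketch with the gradient maths reduced to a placeholder expression:

def weighted_grad_ref(g_out, fwd_inputs):
    # `_` marks the slot (previously fwd_weights) that is intentionally unused.
    fwd_logits, fwd_targets, _ = fwd_inputs
    return (fwd_targets - fwd_logits) * g_out   # placeholder for the real formula

print(weighted_grad_ref(2.0, (0.25, 1.0, 0.5)))  # 1.5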

Unused argument 'dc'
Error

Line: 253 Column: 55

                  @given(n=st.integers(2, 10),
           b=st.integers(1, 5),
           **hu.gcs_cpu_only)
    def test_soft_label_cross_entropy(self, n, b, gc, dc):
        # Initialize X and add 1e-2 for numerical stability
        X = np.random.rand(b, n).astype(np.float32)
        X = X + 1e-2
        for i in range(b):
            X[i] = X[i] / np.sum(X[i])

            

Reported by Pylint.

tools/autograd/gen_inplace_or_view_type.py
76 issues
Attempted relative import beyond top-level package
Error

Line: 17 Column: 1

              from typing import List, Optional, Sequence, Tuple
from tools.codegen.gen import FileManager
from tools.codegen.utils import mapMaybe
from .context import with_native_function_with_differentiability_info
from .gen_trace_type import (
    MANUAL_AUTOGRAD, type_wrapper_name, tie_return_values, get_return_value
)



            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 18 Column: 1

              from tools.codegen.gen import FileManager
from tools.codegen.utils import mapMaybe
from .context import with_native_function_with_differentiability_info
from .gen_trace_type import (
    MANUAL_AUTOGRAD, type_wrapper_name, tie_return_values, get_return_value
)


# See NOTE [ Autograd View Variables ] in variable.h for details.

            

Reported by Pylint.

FIXME: clone indices on construction.
Error

Line: 59 Column: 3

                  # sparse_coo ctor output should really be views of both indices and values,
    # but we only supports making as view of a single variable, and indices is
    # discrete anyways.
    # FIXME: clone indices on construction.
    'sparse_coo_tensor_with_dims_and_tensors': 'values',
    '_reshape_alias': 'self',
}

for key in VIEW_FUNCTIONS_WITH_METADATA_CHANGE:

            

Reported by Pylint.

FIXME: Ideally these functions should be methods on Type class, but we have a
Error

Line: 147 Column: 3

              
TMP_VAR = '_tmp'

# FIXME: Ideally these functions should be methods on Type class, but we have a
#        comment in codegen/model.py there saying these concepts are not well defined.
#        Thus we put a version that commonly used by autograd codegen here.
def is_tensor_type(t: Type) -> bool:
    # TODO: Should handle optional here?
    return t.is_tensor_like() and t.is_list_like() is None

            

Reported by Pylint.

TODO: Should handle optional here?
Error

Line: 151 Column: 3

              #        comment in codegen/model.py there saying these concepts are not well defined.
#        Thus we put a version that commonly used by autograd codegen here.
def is_tensor_type(t: Type) -> bool:
    # TODO: Should handle optional here?
    return t.is_tensor_like() and t.is_list_like() is None

def is_tensor_list_type(t: Type) -> bool:
    # TODO: Should handle optional here?
    return t.is_tensor_like() and t.is_list_like() is not None

            

Reported by Pylint.

TODO: Should handle optional here?
Error

Line: 155 Column: 3

                  return t.is_tensor_like() and t.is_list_like() is None

def is_tensor_list_type(t: Type) -> bool:
    # TODO: Should handle optional here?
    return t.is_tensor_like() and t.is_list_like() is not None

UNPACK_TENSOR = CodeTemplate("""\
auto${ref} ${arg_name}_ = unpack${suffix}(${arg_name}, "${arg_name}", ${arg_pos});""")


            

Reported by Pylint.

TODO: should be str(f.func.name.name)?
Error

Line: 205 Column: 3

                  return body, unpacked_bindings

def get_base_name(f: NativeFunction) -> str:
    return f.func.name.name.base  # TODO: should be str(f.func.name.name)?

def get_view_info(fn: NativeFunctionWithDifferentiabilityInfo) -> Optional[str]:
    f = fn.func
    base_name = get_base_name(f)
    view_info = VIEW_FUNCTIONS.get(base_name, None)

            

Reported by Pylint.

Unused argument 'native_yaml_path'
Error

Line: 428 Column: 5

              
def gen_inplace_or_view_type(
    out: str,
    native_yaml_path: str,
    fns_with_infos: List[NativeFunctionWithDifferentiabilityInfo],
    template_path: str
) -> None:
    # NOTE: see Note [Sharded File] at the top of the VariableType.cpp
    # template regarding sharding of the generated files.

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from tools.codegen.api import cpp
from tools.codegen.api.autograd import (
    NativeFunctionWithDifferentiabilityInfo, gen_differentiable_outputs,
    dispatch_strategy,
)
from tools.codegen.api.types import (Binding, DispatcherSignature, CppSignatureGroup, CType,
                                     BaseCType, OptionalCType, intT, boolT, intArrayRefT)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.context import with_native_function

            

Reported by Pylint.
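
Any one-line summary at the top of the file satisfies the missing-docstring check; the wording below is illustrative only:

"""Code generation for the inplace/view kernels used by autograd.

(Illustrative wording; a single summary line is enough for Pylint's
missing-module-docstring check.)
"""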

standard import "from typing import List, Optional, Sequence, Tuple" should be placed before "from tools.codegen.api import cpp"
Error

Line: 14 Column: 1

                  Type, NativeFunction, SelfArgument, TensorOptionsArguments, Variant,
    SchemaKind, is_foreach_op,
)
from typing import List, Optional, Sequence, Tuple
from tools.codegen.gen import FileManager
from tools.codegen.utils import mapMaybe
from .context import with_native_function_with_differentiability_info
from .gen_trace_type import (
    MANUAL_AUTOGRAD, type_wrapper_name, tie_return_values, get_return_value

            

Reported by Pylint.
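
The wrong-import-order check wants standard-library imports grouped before third-party and first-party ones. A sketch of the expected grouping with generic modules:

# Standard-library imports come first...
from typing import Sequence, Tuple

# ...followed by third-party packages...
import numpy as np

# ...and finally first-party modules such as tools.codegen.gen would go here.
def shape_of(values: Sequence[float]) -> Tuple[int, ...]:
    return np.asarray(list(values)).shape

print(shape_of([1.0, 2.0, 3.0]))   # (3,)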

.circleci/cimodel/data/pytorch_build_data.py
76 issues
Instance of 'TreeConfigNode' has no 'child_constructor' member
Error

Line: 113 Column: 17

                      pass

    def get_children(self):
        return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]


class TopLevelNode(TreeConfigNode):
    def __init__(self, node_name, subtree):
        super(TopLevelNode, self).__init__(None, node_name, subtree)

            

Reported by Pylint.
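
The base class calls `self.child_constructor()` but never defines it, so Pylint cannot prove the member exists. Declaring a stub (or abstract method) on the base both documents the contract and removes the warning. A minimal sketch:

class TreeConfigNode:
    def __init__(self, subtree):
        self.subtree = subtree

    def child_constructor(self):
        # Declared on the base so static analysis sees it; subclasses override.
        raise NotImplementedError("subclasses must return a child node class")

    def get_children(self):
        return [self.child_constructor()(v) for v in self.subtree]

class TopLevelNode(TreeConfigNode):
    def child_constructor(self):
        return TopLevelNode

print(len(TopLevelNode([[], []]).get_children()))  # 2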

TODO: bring back libtorch test
Error

Line: 15 Column: 3

                                  ("pure_torch", [X(True)]),
                ]),
            ]),
            # TODO: bring back libtorch test
            ("7", [X("3.6")]),
        ]),
        ("clang", [
            ("7", [
                ("3.6", [

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from cimodel.lib.conf_tree import ConfigNode, X, XImportant


CONFIG_TREE_DATA = [
    ("xenial", [
        ("gcc", [
            ("5.4", [  # All this subtree rebases to master and then build
                ("3.6", [
                    ("important", [X(True)]),

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 95 Column: 1

              ]


def get_major_pyver(dotted_version):
    parts = dotted_version.split(".")
    return "py" + parts[0]


class TreeConfigNode(ConfigNode):

            

Reported by Pylint.

Missing class docstring
Error

Line: 100 Column: 1

                  return "py" + parts[0]


class TreeConfigNode(ConfigNode):
    def __init__(self, parent, node_name, subtree):
        super(TreeConfigNode, self).__init__(parent, self.modify_label(node_name))
        self.subtree = subtree
        self.init2(node_name)


            

Reported by Pylint.

Consider using Python 3 style super() without arguments
Error

Line: 102 Column: 9

              
class TreeConfigNode(ConfigNode):
    def __init__(self, parent, node_name, subtree):
        super(TreeConfigNode, self).__init__(parent, self.modify_label(node_name))
        self.subtree = subtree
        self.init2(node_name)

    def modify_label(self, label):
        return label

            

Reported by Pylint.
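
Since the codebase is Python 3 only, the zero-argument form of super() is equivalent and shorter. A sketch:

class ConfigNode:                       # stand-in for cimodel.lib.conf_tree.ConfigNode
    def __init__(self, parent, label):
        self.parent, self.label = parent, label

class TreeConfigNode(ConfigNode):
    def __init__(self, parent, node_name, subtree):
        super().__init__(parent, node_name)   # Python 3 style, no arguments
        self.subtree = subtree

print(TreeConfigNode(None, "xenial", []).label)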

Missing function or method docstring
Error

Line: 106 Column: 5

                      self.subtree = subtree
        self.init2(node_name)

    def modify_label(self, label):
        return label

    def init2(self, node_name):
        pass


            

Reported by Pylint.

Method could be a function
Error

Line: 106 Column: 5

                      self.subtree = subtree
        self.init2(node_name)

    def modify_label(self, label):
        return label

    def init2(self, node_name):
        pass


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 109 Column: 5

                  def modify_label(self, label):
        return label

    def init2(self, node_name):
        pass

    def get_children(self):
        return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]


            

Reported by Pylint.

Missing class docstring
Error

Line: 116 Column: 1

                      return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]


class TopLevelNode(TreeConfigNode):
    def __init__(self, node_name, subtree):
        super(TopLevelNode, self).__init__(None, node_name, subtree)

    # noinspection PyMethodMayBeStatic
    def child_constructor(self):

            

Reported by Pylint.

test/run_test.py
75 issues
subprocess call with shell=True identified, security issue.
Security injection

Line: 593
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html

                                          'mpiexec --allow-run-as-root -n 1 bash -c ""', shell=True,
                            stdout=devnull, stderr=subprocess.STDOUT) == 0 else ''
                        noprefix_opt = '--noprefix' if subprocess.call(
                            f'mpiexec {allowrunasroot_opt} -n 1 --noprefix bash -c ""', shell=True,
                            stdout=devnull, stderr=subprocess.STDOUT) == 0 else ''

                    mpiexec = ['mpiexec', '-n', '3', noprefix_opt, allowrunasroot_opt]

                    return_code = run_test(test_module, test_directory, options,

            

Reported by Bandit.
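
Bandit's B602 goes away once the command is passed as an argument list with the default shell=False, so no shell ever parses the string. A sketch mirroring the mpiexec probe above (the flags are copied from the snippet and the helper name is illustrative):

import subprocess

def mpiexec_allows_run_as_root() -> bool:
    cmd = ["mpiexec", "--allow-run-as-root", "-n", "1", "bash", "-c", ""]
    try:
        # List form + shell=False (the default): nothing is shell-interpreted.
        return subprocess.call(cmd, stdout=subprocess.DEVNULL,
                               stderr=subprocess.STDOUT) == 0
    except FileNotFoundError:
        return False      # mpiexec not installed

print(mpiexec_allows_run_as_root())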

Unable to import 'torch'
Error

Line: 15 Column: 1

              import sys
import tempfile

import torch
from torch.utils import cpp_extension
from torch.testing._internal.common_utils import FILE_SCHEMA, IS_IN_CI, TEST_WITH_ROCM, shell, set_cwd
import torch.distributed as dist
from typing import Dict, Optional, List


            

Reported by Pylint.

Unable to import 'torch.utils'
Error

Line: 16 Column: 1

              import tempfile

import torch
from torch.utils import cpp_extension
from torch.testing._internal.common_utils import FILE_SCHEMA, IS_IN_CI, TEST_WITH_ROCM, shell, set_cwd
import torch.distributed as dist
from typing import Dict, Optional, List

try:

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 17 Column: 1

              
import torch
from torch.utils import cpp_extension
from torch.testing._internal.common_utils import FILE_SCHEMA, IS_IN_CI, TEST_WITH_ROCM, shell, set_cwd
import torch.distributed as dist
from typing import Dict, Optional, List

try:
    # using tools/ to optimize test run.

            

Reported by Pylint.

Unable to import 'torch.distributed'
Error

Line: 18 Column: 1

              import torch
from torch.utils import cpp_extension
from torch.testing._internal.common_utils import FILE_SCHEMA, IS_IN_CI, TEST_WITH_ROCM, shell, set_cwd
import torch.distributed as dist
from typing import Dict, Optional, List

try:
    # using tools/ to optimize test run.
    sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))

            

Reported by Pylint.

Unable to import 'coverage'
Error

Line: 1078 Column: 13

                          print_to_stderr(err_message)
    finally:
        if options.coverage:
            from coverage import Coverage
            test_dir = os.path.dirname(os.path.abspath(__file__))
            with set_cwd(test_dir):
                cov = Coverage()
                if PYTORCH_COLLECT_COVERAGE:
                    cov.load()

            

Reported by Pylint.

TODO: move this logic into common_utils.py instead of passing in "-k" individually
Error

Line: 473 Column: 3

                  executable = get_executable_command(options, allow_pytest=not extra_unittest_args,
                                        disable_coverage=disable_coverage)

    # TODO: move this logic into common_utils.py instead of passing in "-k" individually
    # The following logic for running specified tests will only run for non-distributed tests, as those are dispatched
    # to test_distributed and not run_test (this function)
    if options.run_specified_test_cases:
        unittest_args.extend(get_test_case_args(test_module, 'pytest' in executable))


            

Reported by Pylint.

Unused argument 'test_module'
Error

Line: 543 Column: 35

                          os.remove(test_directory + '/' + test_module + '.py')


def test_cpp_extensions_aot_ninja(test_module, test_directory, options):
    return _test_cpp_extensions_aot(test_directory, options, use_ninja=True)


def test_cpp_extensions_aot_no_ninja(test_module, test_directory, options):
    return _test_cpp_extensions_aot(test_directory, options, use_ninja=False)

            

Reported by Pylint.

Unused argument 'test_module'
Error

Line: 547 Column: 38

                  return _test_cpp_extensions_aot(test_directory, options, use_ninja=True)


def test_cpp_extensions_aot_no_ninja(test_module, test_directory, options):
    return _test_cpp_extensions_aot(test_directory, options, use_ninja=False)


def test_distributed(test_module, test_directory, options):
    # MPI tests are broken with Python-3.9

            

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 623 Column: 1

              

class TestChoices(list):
    def __init__(self, *args, **kwargs):
        super(TestChoices, self).__init__(args[0])

    def __contains__(self, item):
        return list.__contains__(self, parse_test_module(item))


            

Reported by Pylint.