The following issues were found:

benchmarks/operator_benchmark/pt/qarithmetic_test.py
33 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
from torch._ops import ops
import operator_benchmark as op_bench

qarithmetic_binary_configs = op_bench.cross_product_configs(
    N=(2, 8, 64, 512),
    dtype=(torch.quint8, torch.qint8, torch.qint32),
    # contig=(False, True),  # TODO: Reenable this after #29435
    contig=(True,),

            

Reported by Pylint.
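
Possible fix (sketch): this and the next import error usually mean Pylint is running in an environment where torch is not installed, rather than pointing at a defect in the benchmark. Running the linter inside the same virtualenv as PyTorch clears them; if that is not practical and the imports are known to work at runtime, the false positive can be acknowledged inline:

    import torch  # pylint: disable=import-error
    from torch._ops import ops  # pylint: disable=import-error
    import operator_benchmark as op_bench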

Unable to import 'torch._ops'
Error

Line: 2 Column: 1

              import torch
from torch._ops import ops
import operator_benchmark as op_bench

qarithmetic_binary_configs = op_bench.cross_product_configs(
    N=(2, 8, 64, 512),
    dtype=(torch.quint8, torch.qint8, torch.qint32),
    # contig=(False, True),  # TODO: Reenable this after #29435
    contig=(True,),

            

Reported by Pylint.

Module 'operator_benchmark' has no 'cross_product_configs' member
Error

Line: 5 Column: 30

              from torch._ops import ops
import operator_benchmark as op_bench

qarithmetic_binary_configs = op_bench.cross_product_configs(
    N=(2, 8, 64, 512),
    dtype=(torch.quint8, torch.qint8, torch.qint32),
    # contig=(False, True),  # TODO: Reenable this after #29435
    contig=(True,),
    tags=('short',)

            

Reported by Pylint.
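
Possible fix (sketch): this and the following no-member reports (op_list, TorchBenchmarkBase, generate_pt_tests_from_op_list) arise because operator_benchmark assembles much of its public API dynamically, so Pylint cannot see the attributes statically. If the calls work at runtime, adding operator_benchmark to Pylint's ignored-modules option, or disabling the check at the call site, is the usual remedy:

    qarithmetic_binary_configs = op_bench.cross_product_configs(  # pylint: disable=no-member
        N=(2, 8, 64, 512),
        dtype=(torch.quint8, torch.qint8, torch.qint32),
        contig=(True,),
        tags=('short',)
    )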

Module 'operator_benchmark' has no 'op_list' member
Error

Line: 14 Column: 26

              )


qarithmetic_binary_ops = op_bench.op_list(
    attrs=(
        ('add', ops.quantized.add),
        ('add_relu', ops.quantized.add_relu),
        ('mul', ops.quantized.mul),
    ),

            

Reported by Pylint.

Module 'operator_benchmark' has no 'op_list' member
Error

Line: 23 Column: 33

                  attr_names=('op_name', 'op_func'),
)

qarithmetic_binary_scalar_ops = op_bench.op_list(
    attrs=(
        ('add_scalar', ops.quantized.add_scalar),
        ('mul_scalar', ops.quantized.mul_scalar),
    ),
    attr_names=('op_name', 'op_func'),

            

Reported by Pylint.

Module 'operator_benchmark' has no 'TorchBenchmarkBase' member
Error

Line: 31 Column: 49

                  attr_names=('op_name', 'op_func'),
)

class _QFunctionalBinaryArithmeticBenchmarkBase(op_bench.TorchBenchmarkBase):
    def setup(self, N, dtype, contig):
        self.qfunctional = torch.nn.quantized.QFunctional()

        # TODO: Consider more diverse shapes
        f_input = (torch.rand(N, N) - 0.5) * 256

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_tests_from_op_list' member
Error

Line: 63 Column: 1

                      return self.op_func(q_input_a, q_input_b, scale=scale, zero_point=zero_point)


op_bench.generate_pt_tests_from_op_list(qarithmetic_binary_ops,
                                        qarithmetic_binary_configs,
                                        QFunctionalBenchmark)


class QFunctionalScalarBenchmark(_QFunctionalBinaryArithmeticBenchmarkBase):

            

Reported by Pylint.

Module 'operator_benchmark' has no 'generate_pt_tests_from_op_list' member
Error

Line: 81 Column: 1

                      return self.op_func(q_input, scalar_input)


op_bench.generate_pt_tests_from_op_list(qarithmetic_binary_scalar_ops,
                                        qarithmetic_binary_configs,
                                        QFunctionalScalarBenchmark)


if __name__ == '__main__':

            

Reported by Pylint.

contig=(False, True), # TODO: Reenable this after #29435
Error

Line: 8 Column: 28

              qarithmetic_binary_configs = op_bench.cross_product_configs(
    N=(2, 8, 64, 512),
    dtype=(torch.quint8, torch.qint8, torch.qint32),
    # contig=(False, True),  # TODO: Reenable this after #29435
    contig=(True,),
    tags=('short',)
)



            

Reported by Pylint.

Attribute 'qfunctional' defined outside __init__
Error

Line: 33 Column: 9

              
class _QFunctionalBinaryArithmeticBenchmarkBase(op_bench.TorchBenchmarkBase):
    def setup(self, N, dtype, contig):
        self.qfunctional = torch.nn.quantized.QFunctional()

        # TODO: Consider more diverse shapes
        f_input = (torch.rand(N, N) - 0.5) * 256
        self.scale = 1.0
        self.zero_point = 0

            

Reported by Pylint.
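
Possible fix (sketch): pre-declare in __init__ every attribute that setup() assigns, so the full instance state is visible in one place. This assumes the op_bench base class takes no required constructor arguments:

    class _QFunctionalBinaryArithmeticBenchmarkBase(op_bench.TorchBenchmarkBase):
        def __init__(self):
            super().__init__()
            # Declared here, filled in by setup(); extend the list for any other
            # attributes that setup() creates.
            self.qfunctional = None
            self.scale = None
            self.zero_point = None

        def setup(self, N, dtype, contig):
            self.qfunctional = torch.nn.quantized.QFunctional()
            ...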

torch/quantization/fx/match_utils.py
33 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

                  Graph,
    Node,
)
from .quantization_types import Pattern
from .quantization_patterns import (
    QuantizeHandler,
    CustomModuleQuantizeHandler,
    StandaloneModuleQuantizeHandler,
    BinaryOpQuantizeHandler,

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

                  Node,
)
from .quantization_types import Pattern
from .quantization_patterns import (
    QuantizeHandler,
    CustomModuleQuantizeHandler,
    StandaloneModuleQuantizeHandler,
    BinaryOpQuantizeHandler,
    binary_op_supported_dtypes,

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 16 Column: 1

                  binary_op_supported_dtypes,
    binary_reference_op_supported_dtypes,
)
from ..qconfig import (
    QConfigAny,
)
from .graph_module import (
    is_observed_standalone_module,
)

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 19 Column: 1

              from ..qconfig import (
    QConfigAny,
)
from .graph_module import (
    is_observed_standalone_module,
)
from ..utils import get_qconfig_dtypes

from typing import Any, Dict, List, Callable, Optional, Tuple, Set

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 22 Column: 1

              from .graph_module import (
    is_observed_standalone_module,
)
from ..utils import get_qconfig_dtypes

from typing import Any, Dict, List, Callable, Optional, Tuple, Set

MatchResult = Tuple[Node, List[Node], Optional[Pattern], QuantizeHandler,
                    QConfigAny]

            

Reported by Pylint.
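
Possible fix (sketch): the relative-import errors above usually mean the file was linted as a standalone script rather than as part of the torch.quantization.fx package, so ".." has no package to resolve against. Linting the package (or the repository root) clears them; alternatively the imports can be written absolutely, using the paths implied by the relative forms:

    from torch.quantization.fx.quantization_types import Pattern
    from torch.quantization.qconfig import QConfigAny
    from torch.quantization.fx.graph_module import is_observed_standalone_module
    from torch.quantization.utils import get_qconfig_dtypes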

Unnecessary pass statement
Error

Line: 32 Column: 5

              class MatchAllNode:
    """ A node pattern that matches all nodes
    """
    pass

# Note: The order of patterns is important! match function will take whatever is matched first, so we'll
# need to put the fusion patterns before single patterns. For example, add_relu should be registered come before relu.
# decorators are applied in the reverse order we see. Also when we match the nodes in the graph with these patterns,
# we'll start from the last node of the graph and traverse back.

            

Reported by Pylint.
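
Possible fix (sketch): the docstring already gives the class a body, so the pass statement can simply be removed:

    class MatchAllNode:
        """ A node pattern that matches all nodes
        """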

Unused variable 'cache_for_no_tensor_check'
Error

Line: 133 Column: 5

                      else:
            matched.append(node)

    cache_for_no_tensor_check: Dict[Node, bool] = dict()
    for node in reversed(graph.nodes):
        if node.name not in match_map and node.name not in all_matched:
            for pattern, value in patterns.items():
                if is_match(modules, node, pattern):
                    skip_this_match = False

            

Reported by Pylint.
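
Possible fix (sketch): if cache_for_no_tensor_check is never read, deleting the assignment is the clean fix; if it is a placeholder for an upcoming change, a leading underscore marks it as intentionally unused:

    # Intentionally unused for now; the underscore matches Pylint's dummy-variable convention.
    _cache_for_no_tensor_check: Dict[Node, bool] = {}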

TODO(future PR): update the pattern to quantize
Error

Line: 157 Column: 3

                                          qconfig_map[base_node.name]
                        if this_node_qconfig:
                            dtypes = get_qconfig_dtypes(this_node_qconfig)
                            # TODO(future PR): update the pattern to quantize
                            # handler logic to take this into account.


                            # This needs to handle 3 cases
                            # 1) op and dtype is in either [is_ref or non-ref] list -> don't skip

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import sys
import torch
from torch.fx.graph import (
    Graph,
    Node,
)
from .quantization_types import Pattern
from .quantization_patterns import (
    QuantizeHandler,

            

Reported by Pylint.
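
Possible fix (sketch): a one-line module docstring at the very top satisfies the check; the wording below is only a placeholder suggestion:

    """Pattern-matching utilities for FX graph mode quantization."""
    import sys
    import torch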

standard import "from typing import Any, Dict, List, Callable, Optional, Tuple, Set" should be placed before "import torch"
Error

Line: 24 Column: 1

              )
from ..utils import get_qconfig_dtypes

from typing import Any, Dict, List, Callable, Optional, Tuple, Set

MatchResult = Tuple[Node, List[Node], Optional[Pattern], QuantizeHandler,
                    QConfigAny]

class MatchAllNode:

            

Reported by Pylint.
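
Possible fix (sketch): Pylint expects standard-library imports (typing) before third-party ones (torch), with local relative imports last, so reordering the header along these lines clears the warning:

    import sys
    from typing import Any, Dict, List, Callable, Optional, Tuple, Set

    import torch
    from torch.fx.graph import (
        Graph,
        Node,
    )

    from .quantization_types import Pattern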

torch/fx/experimental/unification/multipledispatch/conflict.py
33 issues
Attempted relative import beyond top-level package
Error

Line: 1 Column: 1

              from .utils import _toposort, groupby
from .variadic import isvariadic


class AmbiguityWarning(Warning):
    pass


def supercedes(a, b):

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 2 Column: 1

              from .utils import _toposort, groupby
from .variadic import isvariadic


class AmbiguityWarning(Warning):
    pass


def supercedes(a, b):

            

Reported by Pylint.

Positional arguments appear to be out of order
Error

Line: 75 Column: 58

              
def ambiguous(a, b):
    """ A is consistent with B but neither is strictly more specific """
    return consistent(a, b) and not (supercedes(a, b) or supercedes(b, a))


def ambiguities(signatures):
    """ All signature pairs such that A is ambiguous with B """
    signatures = list(map(tuple, signatures))

            

Reported by Pylint.

Positional arguments appear to be out of order
Error

Line: 103 Column: 38

                  """
    # A either supercedes B and B does not supercede A or if B does then call
    # tie_breaker
    return supercedes(a, b) and (not supercedes(b, a) or tie_breaker(a) > tie_breaker(b))


def ordering(signatures):
    """ A sane ordering of signatures to check, first to last
    Topoological sort of edges as given by ``edge`` and ``supercedes``

            

Reported by Pylint.
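
Possible fix (sketch): both reports point at supercedes(b, a) calls where the reversal is deliberate, checking the opposite direction. Naming the reversed check makes the intent visible and lets the warning be silenced where it is a false positive:

    def ambiguous(a, b):
        """ A is consistent with B but neither is strictly more specific """
        # The reversed argument order is intentional: test the other direction.
        reverse = supercedes(b, a)  # pylint: disable=arguments-out-of-order
        return consistent(a, b) and not (supercedes(a, b) or reverse)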

Missing module docstring
Error

Line: 1 Column: 1

              from .utils import _toposort, groupby
from .variadic import isvariadic


class AmbiguityWarning(Warning):
    pass


def supercedes(a, b):

            

Reported by Pylint.

Missing class docstring
Error

Line: 5 Column: 1

              from .variadic import isvariadic


class AmbiguityWarning(Warning):
    pass


def supercedes(a, b):
    """ A is consistent and strictly more specific than B """

            

Reported by Pylint.

Argument name "a" doesn't conform to snake_case naming style
Error

Line: 9 Column: 1

                  pass


def supercedes(a, b):
    """ A is consistent and strictly more specific than B """
    if len(a) < len(b):
        # only case is if a is empty and b is variadic
        return not a and len(b) == 1 and isvariadic(b[-1])
    elif len(a) == len(b):

            

Reported by Pylint.

Argument name "b" doesn't conform to snake_case naming style
Error

Line: 9 Column: 1

                  pass


def supercedes(a, b):
    """ A is consistent and strictly more specific than B """
    if len(a) < len(b):
        # only case is if a is empty and b is variadic
        return not a and len(b) == 1 and isvariadic(b[-1])
    elif len(a) == len(b):

            

Reported by Pylint.

Unnecessary "elif" after "return"
Error

Line: 11 Column: 5

              
def supercedes(a, b):
    """ A is consistent and strictly more specific than B """
    if len(a) < len(b):
        # only case is if a is empty and b is variadic
        return not a and len(b) == 1 and isvariadic(b[-1])
    elif len(a) == len(b):
        return all(map(issubclass, a, b))
    else:

            

Reported by Pylint.
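
Possible fix (sketch): because every branch returns, the elif/else ladder can be flattened into sequential ifs, which is what the no-else-return suggestion amounts to:

    def supercedes(a, b):
        """ A is consistent and strictly more specific than B """
        if len(a) < len(b):
            # only case is if a is empty and b is variadic
            return not a and len(b) == 1 and isvariadic(b[-1])
        if len(a) == len(b):
            return all(map(issubclass, a, b))
        # len(a) > len(b): continue with the variadic-aware walk from the original body
        ...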

Variable name "p1" doesn't conform to snake_case naming style
Error

Line: 18 Column: 9

                      return all(map(issubclass, a, b))
    else:
        # len(a) > len(b)
        p1 = 0
        p2 = 0
        while p1 < len(a) and p2 < len(b):
            cur_a = a[p1]
            cur_b = b[p2]
            if not (isvariadic(cur_a) or isvariadic(cur_b)):

            

Reported by Pylint.
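
Possible fix (sketch): short names such as a, b, p1, and p2 are idiomatic for this dispatch code, so rather than renaming them the usual choices are to whitelist them via Pylint's good-names option or to acknowledge them locally:

    def supercedes(a, b):  # pylint: disable=invalid-name
        """ A is consistent and strictly more specific than B """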

caffe2/python/brew.py
33 issues
Redefining built-in 'sum'
Error

Line: 15 Column: 1

              from caffe2.python.model_helper import ModelHelper

# flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *

            

Reported by Pylint.

Wildcard import caffe2.python.helpers.algebra
Error

Line: 15 Column: 1

              from caffe2.python.model_helper import ModelHelper

# flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *

            

Reported by Pylint.

Wildcard import caffe2.python.helpers.arg_scope
Error

Line: 16 Column: 1

              
# flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *

            

Reported by Pylint.

Unused import threading from wildcard import
Error

Line: 16 Column: 1

              
# flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *

            

Reported by Pylint.

Unused import contextlib from wildcard import
Error

Line: 16 Column: 1

              
# flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *

            

Reported by Pylint.

Wildcard import caffe2.python.helpers.array_helpers
Error

Line: 17 Column: 1

              # flake8: noqa
from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *
from caffe2.python.helpers.elementwise_linear import *

            

Reported by Pylint.

Wildcard import caffe2.python.helpers.control_ops
Error

Line: 18 Column: 1

              from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *
from caffe2.python.helpers.elementwise_linear import *
from caffe2.python.helpers.fc import *

            

Reported by Pylint.

Unused import add_if_op from wildcard import
Error

Line: 18 Column: 1

              from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *
from caffe2.python.helpers.elementwise_linear import *
from caffe2.python.helpers.fc import *

            

Reported by Pylint.

Unused import add_while_op from wildcard import
Error

Line: 18 Column: 1

              from caffe2.python.helpers.algebra import *
from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *
from caffe2.python.helpers.elementwise_linear import *
from caffe2.python.helpers.fc import *

            

Reported by Pylint.

Unused import initializers from wildcard import
Error

Line: 19 Column: 1

              from caffe2.python.helpers.arg_scope import *
from caffe2.python.helpers.array_helpers import *
from caffe2.python.helpers.control_ops import *
from caffe2.python.helpers.conv import *
from caffe2.python.helpers.db_input import *
from caffe2.python.helpers.dropout import *
from caffe2.python.helpers.elementwise_linear import *
from caffe2.python.helpers.fc import *
from caffe2.python.helpers.nonlinearity import *

            

Reported by Pylint.
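
Possible fix (sketch): brew.py deliberately aggregates the helper functions into one namespace (it already carries a "# flake8: noqa" for the same reason), so the star imports may be intentional. Two hedged options: keep the aggregation and say so, or import the re-exported names explicitly so shadowing of builtins such as sum becomes deliberate. The helper names below are only illustrative:

    # Option 1: keep the aggregation behaviour and silence the checks for the rest of the module.
    # pylint: disable=wildcard-import,unused-wildcard-import,redefined-builtin
    from caffe2.python.helpers.algebra import *

    # Option 2: explicit re-exports instead of the star import.
    from caffe2.python.helpers.algebra import sum  # pylint: disable=redefined-builtin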

torch/jit/_monkeytype_config.py
33 issues
Unused trace imported from monkeytype as monkeytype_trace
Error

Line: 12 Column: 5

              _IS_MONKEYTYPE_INSTALLED = True
try:
    import monkeytype  # type: ignore[import]
    from monkeytype import trace as monkeytype_trace
    from monkeytype.db.base import CallTraceThunk, CallTraceStore, CallTraceStoreLogger  # type: ignore[import]
    from monkeytype.config import _startswith, LIB_PATHS  # type: ignore[import]
    from monkeytype.tracing import CallTrace, CodeFilter  # type: ignore[import]
except ImportError:
    _IS_MONKEYTYPE_INSTALLED = False

            

Reported by Pylint.
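
Possible fix (sketch): monkeytype_trace appears to be imported inside the try block so that other torch.jit modules can use it when MonkeyType is available; if that re-export is the intent, marking the import as deliberate is enough:

    # Re-exported for use elsewhere in torch.jit; not called in this module.
    from monkeytype import trace as monkeytype_trace  # pylint: disable=unused-import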

Useless super delegation in method '__init__'
Error

Line: 47 Column: 9

              
    class JitTypeTraceStoreLogger(CallTraceStoreLogger):
        """A JitTypeCallTraceLogger that stores logged traces in a CallTraceStore."""
        def __init__(self, store: CallTraceStore):
            super().__init__(store)

        def log(self, trace: CallTrace) -> None:
            self.traces.append(trace)


            

Reported by Pylint.
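
Possible fix (sketch): the overriding __init__ only forwards to the parent, so the subclass behaves identically without it and the method can be removed:

    class JitTypeTraceStoreLogger(CallTraceStoreLogger):
        """A JitTypeCallTraceLogger that stores logged traces in a CallTraceStore."""

        def log(self, trace: CallTrace) -> None:
            self.traces.append(trace)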

Unused argument 'qualname_prefix'
Error

Line: 69 Column: 13

                      def filter(
            self,
            qualified_name: str,
            qualname_prefix: Optional[str] = None,
            limit: int = 2000
        ) -> List[CallTraceThunk]:
            return self.trace_records[qualified_name]

        def analyze(self, qualified_name: str) -> Dict:

            

Reported by Pylint.

Unused argument 'limit'
Error

Line: 70 Column: 13

                          self,
            qualified_name: str,
            qualname_prefix: Optional[str] = None,
            limit: int = 2000
        ) -> List[CallTraceThunk]:
            return self.trace_records[qualified_name]

        def analyze(self, qualified_name: str) -> Dict:
            # Analyze the types for the given module

            

Reported by Pylint.
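
Possible fix (sketch): filter() keeps qualname_prefix and limit to stay signature-compatible with the CallTraceStore interface even though this store ignores them, so one common pattern is to document that and scope a disable to the function rather than drop the parameters:

    def filter(  # pylint: disable=unused-argument
        self,
        qualified_name: str,
        qualname_prefix: Optional[str] = None,  # kept for CallTraceStore API compatibility
        limit: int = 2000,                      # kept for CallTraceStore API compatibility
    ) -> List[CallTraceThunk]:
        return self.trace_records[qualified_name]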

TODO: To remove this check once Union suppport in TorchScript lands.
Error

Line: 109 Column: 3

                              _all_type = _all_type.lstrip(" ")  # Remove any trailing spaces

                if len(types) == 2 and 'NoneType' in _all_type:
                    # TODO: To remove this check once Union suppport in TorchScript lands.
                    all_args[arg] = {get_optional_of_element_type(_all_type)}
                elif len(types) > 1:
                    all_args[arg] = {'Any'}
                else:
                    all_args[arg] = {_all_type[:-1]}

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import inspect
import typing
import pathlib
import torch
from typing import Optional, Iterable, List, Dict
from collections import defaultdict
from types import CodeType

_IS_MONKEYTYPE_INSTALLED = True

            

Reported by Pylint.

standard import "from typing import Optional, Iterable, List, Dict" should be placed before "import torch"
Error

Line: 5 Column: 1

              import typing
import pathlib
import torch
from typing import Optional, Iterable, List, Dict
from collections import defaultdict
from types import CodeType

_IS_MONKEYTYPE_INSTALLED = True
try:

            

Reported by Pylint.

standard import "from collections import defaultdict" should be placed before "import torch"
Error

Line: 6 Column: 1

              import pathlib
import torch
from typing import Optional, Iterable, List, Dict
from collections import defaultdict
from types import CodeType

_IS_MONKEYTYPE_INSTALLED = True
try:
    import monkeytype  # type: ignore[import]

            

Reported by Pylint.

standard import "from types import CodeType" should be placed before "import torch"
Error

Line: 7 Column: 1

              import torch
from typing import Optional, Iterable, List, Dict
from collections import defaultdict
from types import CodeType

_IS_MONKEYTYPE_INSTALLED = True
try:
    import monkeytype  # type: ignore[import]
    from monkeytype import trace as monkeytype_trace

            

Reported by Pylint.

Line too long (111/100)
Error

Line: 13 Column: 1

              try:
    import monkeytype  # type: ignore[import]
    from monkeytype import trace as monkeytype_trace
    from monkeytype.db.base import CallTraceThunk, CallTraceStore, CallTraceStoreLogger  # type: ignore[import]
    from monkeytype.config import _startswith, LIB_PATHS  # type: ignore[import]
    from monkeytype.tracing import CallTrace, CodeFilter  # type: ignore[import]
except ImportError:
    _IS_MONKEYTYPE_INSTALLED = False


            

Reported by Pylint.
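
Possible fix (sketch): wrapping the import in parentheses keeps it under the 100-character limit without changing behaviour:

    from monkeytype.db.base import (  # type: ignore[import]
        CallTraceThunk,
        CallTraceStore,
        CallTraceStoreLogger,
    )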

caffe2/python/ideep/expanddims_squeeze_op_test.py
33 issues
Unable to import 'hypothesis.strategies'
Error

Line: 7 Column: 1

              

import unittest
import hypothesis.strategies as st
from hypothesis import given
import numpy as np
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.ideep_test_util as mu

            

Reported by Pylint.

Unable to import 'hypothesis'
Error

Line: 8 Column: 1

              
import unittest
import hypothesis.strategies as st
from hypothesis import given
import numpy as np
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.ideep_test_util as mu


            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'use_mkldnn' member
Error

Line: 15 Column: 22

              import caffe2.python.ideep_test_util as mu


@unittest.skipIf(not workspace.C.use_mkldnn, "No MKLDNN support.")
class ExpandDimsSqueezeTest(hu.HypothesisTestCase):
    @given(
        squeeze_dims=st.lists(st.integers(0, 3), min_size=1, max_size=3),
        inplace=st.booleans(),
        **mu.gcs

            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 22 Column: 51

                      inplace=st.booleans(),
        **mu.gcs
        )
    def test_squeeze(self, squeeze_dims, inplace, gc, dc):
        shape = [
            1 if dim in squeeze_dims else np.random.randint(1, 5)
            for dim in range(4)
        ]
        X = np.random.rand(*shape).astype(np.float32)

            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 38 Column: 60

                      inplace=st.booleans(),
        **mu.gcs_cpu_ideep
        )
    def test_squeeze_fallback(self, squeeze_dims, inplace, gc, dc):
        shape = [
            1 if dim in squeeze_dims else np.random.randint(1, 5)
            for dim in range(4)
        ]
        X = np.random.rand(*shape).astype(np.float32)

            

Reported by Pylint.

Redundant use of assertTrue with constant value False
Error

Line: 70 Column: 13

                          print(Y1.flatten())
            print(Y0.flatten())
            print(np.max(np.abs(Y1 - Y0)))
            self.assertTrue(False)


    @given(
        squeeze_dims=st.lists(st.integers(0, 3), min_size=1, max_size=3),
        inplace=st.booleans(),

            

Reported by Pylint.
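
Possible fix (sketch): self.fail() states the intent directly and produces a readable failure message instead of the redundant assertTrue(False); the message text below is only illustrative:

    print(Y1.flatten())
    print(Y0.flatten())
    print(np.max(np.abs(Y1 - Y0)))
    self.fail("ideep and CPU outputs differ; max abs diff printed above")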

Unused argument 'gc'
Error

Line: 78 Column: 55

                      inplace=st.booleans(),
        **mu.gcs
        )
    def test_expand_dims(self, squeeze_dims, inplace, gc, dc):
        oshape = [
            1 if dim in squeeze_dims else np.random.randint(2, 5)
            for dim in range(4)
        ]
        nshape = [s for s in oshape if s!=1]

            

Reported by Pylint.

Unused argument 'gc'
Error

Line: 97 Column: 64

                      inplace=st.booleans(),
        **mu.gcs_cpu_ideep
        )
    def test_expand_dims_fallback(self, squeeze_dims, inplace, gc, dc):
        oshape = [
            1 if dim in squeeze_dims else np.random.randint(2, 5)
            for dim in range(4)
        ]
        nshape = [s for s in oshape if s!=1]

            

Reported by Pylint.

Redundant use of assertTrue with constant value False
Error

Line: 132 Column: 13

                          print(Y1.flatten())
            print(Y0.flatten())
            print(np.max(np.abs(Y1 - Y0)))
            self.assertTrue(False)


if __name__ == "__main__":
    unittest.main()

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              




import unittest
import hypothesis.strategies as st
from hypothesis import given
import numpy as np

            

Reported by Pylint.

benchmarks/fastrnns/profile.py
33 issues
subprocess call with shell=True identified, security issue.
Security injection

Line: 58
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html

                  """Returns (return-code, stdout, stderr)"""
    print('[system] {}'.format(command))
    p = subprocess.Popen(command, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    output, err = p.communicate()
    rc = p.returncode
    output = output.decode("ascii")
    err = err.decode("ascii")
    return rc, output, err

            

Reported by Bandit.
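
Possible fix (sketch): if the commands never rely on shell features such as pipes or globbing, splitting them and dropping shell=True removes the injection surface Bandit warns about. A sketch of the system() helper under that assumption:

    import shlex
    import subprocess

    def system(command):
        """Returns (return-code, stdout, stderr)"""
        print('[system] {}'.format(command))
        p = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        output, err = p.communicate()
        return p.returncode, output.decode("ascii"), err.decode("ascii")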

Unable to import 'torch'
Error

Line: 5 Column: 1

              import subprocess
import sys
import time
import torch
import datetime

from .runner import get_nn_runners



            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              import torch
import datetime

from .runner import get_nn_runners


def run_rnn(name, rnn_creator, nloops=5,
            seqLength=100, numLayers=1, inputSize=512, hiddenSize=512,
            miniBatch=64, device='cuda', seed=None):

            

Reported by Pylint.

Unused argument 'name'
Error

Line: 11 Column: 13

              from .runner import get_nn_runners


def run_rnn(name, rnn_creator, nloops=5,
            seqLength=100, numLayers=1, inputSize=512, hiddenSize=512,
            miniBatch=64, device='cuda', seed=None):
    def run_iter(modeldef):
        # Forward
        forward_output = modeldef.forward(*modeldef.inputs)

            

Reported by Pylint.

Expression "[run_iter(modeldef) for _ in range(nloops)]" is assigned to nothing
Error

Line: 38 Column: 5

                                      miniBatch=miniBatch, device=device, seed=seed)
    modeldef = rnn_creator(**creator_args)

    [run_iter(modeldef) for _ in range(nloops)]


def profile(rnns, sleep_between_seconds=1, nloops=5,
            internal_run=True,  # Unused, get rid of this TODO
            seqLength=100, numLayers=1, inputSize=512, hiddenSize=512,

            

Reported by Pylint.
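
Possible fix (sketch): the list comprehension is used only for its side effects, so a plain loop makes that explicit and avoids building a throwaway list:

    for _ in range(nloops):
        run_iter(modeldef)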

Unused argument 'internal_run'
Error

Line: 42 Column: 13

              

def profile(rnns, sleep_between_seconds=1, nloops=5,
            internal_run=True,  # Unused, get rid of this TODO
            seqLength=100, numLayers=1, inputSize=512, hiddenSize=512,
            miniBatch=64, device='cuda', seed=None):
    params = dict(seqLength=seqLength, numLayers=numLayers,
                  inputSize=inputSize, hiddenSize=hiddenSize,
                  miniBatch=miniBatch, device=device, seed=seed)

            

Reported by Pylint.

Redefining name 'args' from outer scope (line 126)
Error

Line: 92 Column: 1

                  return system('nvprof -o {} {}'.format(outpath, cmd))


def full_profile(rnns, **args):
    profile_args = []
    for k, v in args.items():
        profile_args.append('--{}={}'.format(k, v))
    profile_args.append('--rnns {}'.format(' '.join(rnns)))
    profile_args.append('--internal_run')

            

Reported by Pylint.
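
Possible fix (sketch): renaming the keyword-arguments parameter avoids shadowing the module-level args produced by argparse:

    def full_profile(rnns, **kwargs):
        profile_args = []
        for k, v in kwargs.items():
            profile_args.append('--{}={}'.format(k, v))
        profile_args.append('--rnns {}'.format(' '.join(rnns)))
        profile_args.append('--internal_run')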

Missing module docstring
Error

Line: 1 Column: 1

              import argparse
import subprocess
import sys
import time
import torch
import datetime

from .runner import get_nn_runners


            

Reported by Pylint.

Consider possible security implications associated with subprocess module.
Security blacklist

Line: 2
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess

              import argparse
import subprocess
import sys
import time
import torch
import datetime

from .runner import get_nn_runners


            

Reported by Bandit.

standard import "import datetime" should be placed before "import torch"
Error

Line: 6 Column: 1

              import sys
import time
import torch
import datetime

from .runner import get_nn_runners


def run_rnn(name, rnn_creator, nloops=5,

            

Reported by Pylint.

test/quantization/eager/test_equalize_eager.py
33 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
import torch.nn as nn

from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy

            

Reported by Pylint.

Unable to import 'torch.nn'
Error

Line: 2 Column: 1

              import torch
import torch.nn as nn

from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_quantization'
Error

Line: 4 Column: 1

              import torch
import torch.nn as nn

from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy

            

Reported by Pylint.

Unable to import 'torch.quantization.fuse_modules'
Error

Line: 5 Column: 1

              import torch.nn as nn

from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy


            

Reported by Pylint.

Unable to import 'torch.quantization._equalize'
Error

Line: 7 Column: 1

              from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy

class TestEqualizeEager(QuantizationTestCase):
    def checkChannelsEqualized(self, tensor1, tensor2, output_axis, input_axis):

            

Reported by Pylint.

Access to a protected member _modules of a client class
Error

Line: 29 Column: 20

                      curr = model
        name = name.split('.')
        for subname in name:
            curr = curr._modules[subname]
        return curr

    def test_cross_layer_equalization(self):
        ''' applies _equalize.cross_layer_equalization on two modules and checks
        to make sure channels ranges are equivalent

            

Reported by Pylint.
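
Possible fix (sketch): nn.Module.named_modules() exposes every registered submodule under its dotted name, so the helper can look modules up through the public API instead of walking _modules. The helper name below is assumed for illustration:

    def get_module(self, model, name):
        # named_modules() yields (dotted_name, module) pairs for all registered submodules.
        return dict(model.named_modules())[name]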

Redefining built-in 'input'
Error

Line: 95 Column: 9

                      self.checkChannelsEqualized(linear1.weight, linear2.weight, 0, 1)
        self.checkChannelsEqualized(linear2.weight, linear3.weight, 0, 1)

        input = torch.randn(20, 3)
        self.assertEqual(chain1(input), chain2(input))

    def test_equalize_fused_convrelu(self):
        ''' Checks to see if eager mode equalization supports fused
        ConvReLU2d models

            

Reported by Pylint.
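
Possible fix (sketch): renaming the local tensor avoids shadowing the input() builtin; the same rename applies to the other occurrences reported below:

    inp = torch.randn(20, 3)
    self.assertEqual(chain1(inp), chain2(inp))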

Redefining built-in 'input'
Error

Line: 140 Column: 9

                      self.checkChannelsEqualized(conv1.weight, conv2.weight, 0, 1)
        self.checkChannelsEqualized(conv2.weight, conv3.weight, 0, 1)

        input = torch.randn(3, 3, 1, 1)
        self.assertEqual(fused_model1(input), fused_model2(input))
        self.assertEqual(fused_model1(input), model(input))

    def test_equalize_fused_linearrelu(self):
        ''' Checks to see if eager mode equalization supports fused

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 186 Column: 9

                      self.checkChannelsEqualized(linear1.weight, linear2.weight, 0, 1)
        self.checkChannelsEqualized(linear2.weight, linear3.weight, 0, 1)

        input = torch.randn(20, 3)
        self.assertEqual(fused_model1(input), fused_model2(input))
        self.assertEqual(fused_model1(input), model(input))

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import torch.nn as nn

from torch.testing._internal.common_quantization import QuantizationTestCase
from torch.quantization.fuse_modules import fuse_modules

import torch.quantization._equalize as _equalize

import copy

            

Reported by Pylint.

caffe2/experiments/python/device_reduce_sum_bench.py
33 issues
Module 'itertools' has no 'imap' member
Error

Line: 71 Column: 18

                          ["y"]
        )

        for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
            X = np.random.rand(n).astype(np.float32)
            logger.info('Running benchmark for n = {}'.format(n))
            ret = runOpBenchmark(gpu_do, op, inputs=[X])
            self.results.append((n, ret[1]))


            

Reported by Pylint.
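
Possible fix (sketch): itertools.imap no longer exists on Python 3; the builtin map is already lazy and is a drop-in replacement here and in the other occurrences reported below:

    for n in map(pow, itertools.cycle([10]), range(10)):
        X = np.random.rand(n).astype(np.float32)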

Module 'itertools' has no 'imap' member
Error

Line: 86 Column: 18

                          ["y"]
        )

        for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
            X = np.random.rand(n).astype(np.float32)
            logger.info('Running benchmark for n = {}'.format(n))
            ret = runOpBenchmark(gpu_do, op, inputs=[X])
            self.results.append((n, ret[1]))


            

Reported by Pylint.

Module 'itertools' has no 'imap' member
Error

Line: 101 Column: 18

                          ["probs", "avgloss"],
        )

        for n in itertools.imap(pow, itertools.cycle([10]), range(8)):
            for D in itertools.imap(pow, itertools.cycle([10]), range(3)):
                X = np.random.rand(n, D).astype(np.float32)
                label = (np.random.rand(n) * D).astype(np.int32)
                logger.info('Running benchmark for n = {}, D= {}'.format(n, D))
                ret = runOpBenchmark(gpu_do, op, inputs=[X, label])

            

Reported by Pylint.

Module 'itertools' has no 'imap' member
Error

Line: 102 Column: 22

                      )

        for n in itertools.imap(pow, itertools.cycle([10]), range(8)):
            for D in itertools.imap(pow, itertools.cycle([10]), range(3)):
                X = np.random.rand(n, D).astype(np.float32)
                label = (np.random.rand(n) * D).astype(np.int32)
                logger.info('Running benchmark for n = {}, D= {}'.format(n, D))
                ret = runOpBenchmark(gpu_do, op, inputs=[X, label])
                self.results.append(((n, D), ret[1]))

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 73 Column: 25

              
        for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
            X = np.random.rand(n).astype(np.float32)
            logger.info('Running benchmark for n = {}'.format(n))
            ret = runOpBenchmark(gpu_do, op, inputs=[X])
            self.results.append((n, ret[1]))


class SumSqrElements(Benchmark):

            

Reported by Pylint.
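
Possible fix (sketch): passing the value as a logging argument defers the string formatting until the record is actually emitted, which is what the warning asks for:

    logger.info('Running benchmark for n = %d', n)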

Use lazy % formatting in logging functions
Error

Line: 88 Column: 25

              
        for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
            X = np.random.rand(n).astype(np.float32)
            logger.info('Running benchmark for n = {}'.format(n))
            ret = runOpBenchmark(gpu_do, op, inputs=[X])
            self.results.append((n, ret[1]))


class SoftMaxWithLoss(Benchmark):

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 105 Column: 29

                          for D in itertools.imap(pow, itertools.cycle([10]), range(3)):
                X = np.random.rand(n, D).astype(np.float32)
                label = (np.random.rand(n) * D).astype(np.int32)
                logger.info('Running benchmark for n = {}, D= {}'.format(n, D))
                ret = runOpBenchmark(gpu_do, op, inputs=[X, label])
                self.results.append(((n, D), ret[1]))


def parse_args():

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software

            

Reported by Pylint.

Missing class docstring
Error

Line: 41 Column: 1

              ALL_BENCHMARKS = {}


class BenchmarkMeta(type):
    def __new__(metacls, name, bases, class_dict):
        cls = type.__new__(metacls, name, bases, class_dict)
        if name != 'Benchmark':
            ALL_BENCHMARKS[name] = cls
        return cls

            

Reported by Pylint.

Metaclass class method __new__ should have 'cls' as first argument
Error

Line: 42 Column: 5

              

class BenchmarkMeta(type):
    def __new__(metacls, name, bases, class_dict):
        cls = type.__new__(metacls, name, bases, class_dict)
        if name != 'Benchmark':
            ALL_BENCHMARKS[name] = cls
        return cls


            

Reported by Pylint.
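
Possible fix (sketch): renaming the first parameter to cls, and the local result so the two do not collide, satisfies the convention without changing behaviour:

    class BenchmarkMeta(type):
        def __new__(cls, name, bases, class_dict):
            new_cls = type.__new__(cls, name, bases, class_dict)
            if name != 'Benchmark':
                ALL_BENCHMARKS[name] = new_cls
            return new_cls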

caffe2/python/dataset.py
33 issues
An attribute defined in dataset line 213 hides this method
Error

Line: 267 Column: 5

                      """Return the list of field names for this dataset."""
        return self.fields

    def field_types(self):
        """
        Return the list of field dtypes for this dataset.

        If a list of strings, not a schema.Struct, was passed to the
        constructor, this will return a list of dtype(np.void).

            

Reported by Pylint.

Parameters differ from overridden 'setup_ex' method
Error

Line: 35 Column: 5

                      self.enforce_batch_size = enforce_batch_size
        self.cursor = None

    def setup_ex(self, init_net, exit_net):
        if self.cursor is None:
            self.cursor = init_net.CreateTreeCursor(
                [],
                init_net.NextScopedBlob(self.name),
                fields=self.dataset.fields)

            

Reported by Pylint.

Unused argument 'exit_net'
Error

Line: 35 Column: 34

                      self.enforce_batch_size = enforce_batch_size
        self.cursor = None

    def setup_ex(self, init_net, exit_net):
        if self.cursor is None:
            self.cursor = init_net.CreateTreeCursor(
                [],
                init_net.NextScopedBlob(self.name),
                fields=self.dataset.fields)

            

Reported by Pylint.

Parameters differ from overridden 'setup_ex' method
Error

Line: 71 Column: 5

                      self.loop_over = loop_over
        self.enforce_batch_size = enforce_batch_size

    def setup_ex(self, init_net, exit_net):
        if self.cursor is None:
            self.cursor = init_net.CreateTreeCursor(
                [],
                init_net.NextScopedBlob(self.name),
                fields=self.dataset.fields)

            

Reported by Pylint.

Unused argument 'exit_net'
Error

Line: 71 Column: 34

                      self.loop_over = loop_over
        self.enforce_batch_size = enforce_batch_size

    def setup_ex(self, init_net, exit_net):
        if self.cursor is None:
            self.cursor = init_net.CreateTreeCursor(
                [],
                init_net.NextScopedBlob(self.name),
                fields=self.dataset.fields)

            

Reported by Pylint.

Attribute 'offsets' defined outside __init__
Error

Line: 86 Column: 9

                      offsets = net.ComputeOffset(
            [self.cursor] + self.dataset.content().field_blobs(),
            'offsets')
        self.offsets = offsets

    def sort_and_shuffle(self, net, sort_by_field=None,
                         shuffle_size=1, batch_size=1):
        # no sorting by default
        content = self.dataset.content()

            

Reported by Pylint.

Unused argument 'exit_net'
Error

Line: 130 Column: 34

                      self._content = content
        self.mutex = None

    def setup_ex(self, init_net, exit_net):
        if self.mutex is None:
            self.mutex = init_net.CreateMutex([])

    def write(self, writer_net, fields):
        """

            

Reported by Pylint.

Parameters differ from overridden 'setup_ex' method
Error

Line: 130 Column: 5

                      self._content = content
        self.mutex = None

    def setup_ex(self, init_net, exit_net):
        if self.mutex is None:
            self.mutex = init_net.CreateMutex([])

    def write(self, writer_net, fields):
        """

            

Reported by Pylint.

Unnecessary pass statement
Error

Line: 156 Column: 9

              
    def commit(self, finish_net):
        """Commit is a no-op for an in-memory dataset."""
        pass


def Const(net, value, dtype=None, name=None):
    """
    Create a 'constant' by first creating an external input in the given

            

Reported by Pylint.

Unused argument 'init_net'
Error

Line: 173 Column: 40

                  return blob


def execution_step_with_progress(name, init_net, substeps, rows_read):
    # progress reporter
    report_net = core.Net('report_net')
    report_net.Print([rows_read], [])
    return core.execution_step(
        name,

            

Reported by Pylint.