The following issues were found:

torch/fx/experimental/unification/variable.py
18 issues
Attempted relative import beyond top-level package
Error

Line: 2 Column: 1

              from contextlib import contextmanager
from .utils import hashable
from .dispatch import dispatch

_global_logic_variables = set()  # type: ignore[var-annotated]
_glv = _global_logic_variables


class Var(object):

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 3 Column: 1

              from contextlib import contextmanager
from .utils import hashable
from .dispatch import dispatch

_global_logic_variables = set()  # type: ignore[var-annotated]
_glv = _global_logic_variables


class Var(object):

            

Reported by Pylint.

function already defined line 44
Error

Line: 50 Column: 1

              isvar

@dispatch(object)  # type: ignore[no-redef]
def isvar(o):
    return not not _glv and hashable(o) and o in _glv


@contextmanager
def variables(*variables):

            

Reported by Pylint.

Lambda may not be necessary
Error

Line: 37 Column: 12

              

def var():
    return lambda *args: Var(*args)

def vars():
    return lambda n: [var() for i in range(n)]



            

Reported by Pylint.

Redefining built-in 'vars'
Error

Line: 39 Column: 1

              def var():
    return lambda *args: Var(*args)

def vars():
    return lambda n: [var() for i in range(n)]


@dispatch(Var)
def isvar(v):

            

Reported by Pylint.

Unused argument 'v'
Error

Line: 44 Column: 11

              

@dispatch(Var)
def isvar(v):
    return True

isvar

@dispatch(object)  # type: ignore[no-redef]

            

Reported by Pylint.

Statement seems to have no effect
Error

Line: 47 Column: 1

              def isvar(v):
    return True

isvar

@dispatch(object)  # type: ignore[no-redef]
def isvar(o):
    return not not _glv and hashable(o) and o in _glv


            

Reported by Pylint.

Redefining name 'variables' from outer scope (line 55)
Error

Line: 55 Column: 1

              

@contextmanager
def variables(*variables):
    """ Context manager for logic variables
    >>> from __future__ import with_statement
    >>> with variables(1):
    ...     print(isvar(1))
    True

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from contextlib import contextmanager
from .utils import hashable
from .dispatch import dispatch

_global_logic_variables = set()  # type: ignore[var-annotated]
_glv = _global_logic_variables


class Var(object):

            

Reported by Pylint.

Class 'Var' inherits from object, can be safely removed from bases in python3
Error

Line: 9 Column: 1

              _glv = _global_logic_variables


class Var(object):
    """ Logic Variable """

    _id = 1

    def __new__(cls, *token):

            

Reported by Pylint.

torch/jit/_serialization.py
18 issues
Module 'torch' has no 'device' member
Error

Line: 173 Column: 24

              
def validate_map_location(map_location=None):
    if isinstance(map_location, str):
        map_location = torch.device(map_location)
    elif not (map_location is None or isinstance(map_location, torch.device)):
        raise ValueError(
            "map_location should be either None, string or torch.device, "
            "but got type: " + str(type(map_location))
        )

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 174 Column: 64

              def validate_map_location(map_location=None):
    if isinstance(map_location, str):
        map_location = torch.device(map_location)
    elif not (map_location is None or isinstance(map_location, torch.device)):
        raise ValueError(
            "map_location should be either None, string or torch.device, "
            "but got type: " + str(type(map_location))
        )


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 159 Column: 10

                  if _extra_files is None:
        _extra_files = {}

    cu = torch._C.CompilationUnit()
    if isinstance(f, str) or isinstance(f, pathlib.Path):
        cpp_module = torch._C.import_ir_module(cu, str(f), map_location, _extra_files)
    else:
        cpp_module = torch._C.import_ir_module_from_buffer(
            cu, f.read(), map_location, _extra_files

            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 161 Column: 22

              
    cu = torch._C.CompilationUnit()
    if isinstance(f, str) or isinstance(f, pathlib.Path):
        cpp_module = torch._C.import_ir_module(cu, str(f), map_location, _extra_files)
    else:
        cpp_module = torch._C.import_ir_module_from_buffer(
            cu, f.read(), map_location, _extra_files
        )


            

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 163 Column: 22

                  if isinstance(f, str) or isinstance(f, pathlib.Path):
        cpp_module = torch._C.import_ir_module(cu, str(f), map_location, _extra_files)
    else:
        cpp_module = torch._C.import_ir_module_from_buffer(
            cu, f.read(), map_location, _extra_files
        )

    # TODO: Pretty sure this approach loses ConstSequential status and such
    return wrap_cpp_module(cpp_module)

            

Reported by Pylint.

TODO: Pretty sure this approach loses ConstSequential status and such
Error

Line: 167 Column: 3

                          cu, f.read(), map_location, _extra_files
        )

    # TODO: Pretty sure this approach loses ConstSequential status and such
    return wrap_cpp_module(cpp_module)


def validate_map_location(map_location=None):
    if isinstance(map_location, str):

            

Reported by Pylint.

Argument name "f" doesn't conform to snake_case naming style
Error

Line: 19 Column: 1

              from torch.serialization import validate_cuda_device


def save(m, f, _extra_files=None):
    r"""
    Save an offline version of this module for use in a separate process. The
    saved module serializes all of the methods, submodules, parameters, and
    attributes of this module. It can be loaded into the C++ API using
    ``torch::jit::load(filename)`` or into the Python API with

            

Reported by Pylint.

Argument name "m" doesn't conform to snake_case naming style
Error

Line: 19 Column: 1

              from torch.serialization import validate_cuda_device


def save(m, f, _extra_files=None):
    r"""
    Save an offline version of this module for use in a separate process. The
    saved module serializes all of the methods, submodules, parameters, and
    attributes of this module. It can be loaded into the C++ API using
    ``torch::jit::load(filename)`` or into the Python API with

            

Reported by Pylint.

Consider merging these isinstance calls to isinstance(f, (pathlib.Path, str))
Error

Line: 80 Column: 8

                  """
    if _extra_files is None:
        _extra_files = {}
    if isinstance(f, str) or isinstance(f, pathlib.Path):
        m.save(f, _extra_files=_extra_files)
    else:
        ret = m.save_to_buffer(_extra_files=_extra_files)
        f.write(ret)


            

Reported by Pylint.

Argument name "f" doesn't conform to snake_case naming style
Error

Line: 87 Column: 1

                      f.write(ret)


def load(f, map_location=None, _extra_files=None):
    r"""
    Load a :class:`ScriptModule` or :class:`ScriptFunction` previously
    saved with :func:`torch.jit.save <torch.jit.save>`

    All previously saved modules, no matter their device, are first loaded onto CPU,

            

Reported by Pylint.

torch/utils/_pytree.py
18 issues
String statement has no effect
Error

Line: 4 Column: 1

              from typing import NamedTuple, Callable, Any, Tuple, List, Dict, Type, cast, Optional
from collections import namedtuple

"""
Contains utility functions for working with nested python data structures.

A *pytree* is Python nested data structure. It is a tree in the sense that
nodes are Python collections (e.g., list, tuple, dict) and the leaves are
Python values. Furthermore, a pytree should not contain reference cycles.

            

Reported by Pylint.

Unused argument 'context'
Error

Line: 51 Column: 40

              def _list_flatten(d: List[Any]) -> Tuple[List[Any], Context]:
    return d, None

def _list_unflatten(values: List[Any], context: Context) -> List[Any]:
    return list(values)

def _tuple_flatten(d: Tuple[Any, ...]) -> Tuple[List[Any], Context]:
    return list(d), None


            

Reported by Pylint.

Unused argument 'context'
Error

Line: 57 Column: 41

              def _tuple_flatten(d: Tuple[Any, ...]) -> Tuple[List[Any], Context]:
    return list(d), None

def _tuple_unflatten(values: List[Any], context: Context) -> Tuple[Any, ...]:
    return tuple(values)

def _namedtuple_flatten(d: NamedTuple) -> Tuple[List[Any], Context]:
    return list(d), type(d)


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from typing import NamedTuple, Callable, Any, Tuple, List, Dict, Type, cast, Optional
from collections import namedtuple

"""
Contains utility functions for working with nested python data structures.

A *pytree* is Python nested data structure. It is a tree in the sense that
nodes are Python collections (e.g., list, tuple, dict) and the leaves are
Python values. Furthermore, a pytree should not contain reference cycles.

            

Reported by Pylint.

Missing class docstring
Error

Line: 33 Column: 1

              FlattenFunc = Callable[[PyTree], Tuple[List, Context]]
UnflattenFunc = Callable[[List, Context], PyTree]

class NodeDef(NamedTuple):
    flatten_fn: FlattenFunc
    unflatten_fn: UnflattenFunc

SUPPORTED_NODES: Dict[Type[Any], NodeDef] = {}


            

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 42 Column: 1

              def _register_pytree_node(typ: Any, flatten_fn: FlattenFunc, unflatten_fn: UnflattenFunc) -> None:
    SUPPORTED_NODES[typ] = NodeDef(flatten_fn, unflatten_fn)

def _dict_flatten(d: Dict[Any, Any]) -> Tuple[List[Any], Context]:
    return list(d.values()), list(d.keys())

def _dict_unflatten(values: List[Any], context: Context) -> Dict[Any, Any]:
    return {key: value for key, value in zip(context, values)}


            

Reported by Pylint.

Unnecessary use of a comprehension
Error

Line: 46 Column: 1

                  return list(d.values()), list(d.keys())

def _dict_unflatten(values: List[Any], context: Context) -> Dict[Any, Any]:
    return {key: value for key, value in zip(context, values)}

def _list_flatten(d: List[Any]) -> Tuple[List[Any], Context]:
    return d, None

def _list_unflatten(values: List[Any], context: Context) -> List[Any]:

            

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 48 Column: 1

              def _dict_unflatten(values: List[Any], context: Context) -> Dict[Any, Any]:
    return {key: value for key, value in zip(context, values)}

def _list_flatten(d: List[Any]) -> Tuple[List[Any], Context]:
    return d, None

def _list_unflatten(values: List[Any], context: Context) -> List[Any]:
    return list(values)


            

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 54 Column: 1

              def _list_unflatten(values: List[Any], context: Context) -> List[Any]:
    return list(values)

def _tuple_flatten(d: Tuple[Any, ...]) -> Tuple[List[Any], Context]:
    return list(d), None

def _tuple_unflatten(values: List[Any], context: Context) -> Tuple[Any, ...]:
    return tuple(values)


            

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 60 Column: 1

              def _tuple_unflatten(values: List[Any], context: Context) -> Tuple[Any, ...]:
    return tuple(values)

def _namedtuple_flatten(d: NamedTuple) -> Tuple[List[Any], Context]:
    return list(d), type(d)

def _namedtuple_unflatten(values: List[Any], context: Context) -> NamedTuple:
    return cast(NamedTuple, context(*values))


            

Reported by Pylint.

torch/utils/checkpoint.py
18 issues
Module 'torch' has no 'is_autocast_enabled' member
Error

Line: 62 Column: 35

                      check_backward_validity(args)
        ctx.run_function = run_function
        ctx.preserve_rng_state = preserve_rng_state
        ctx.had_autocast_in_fwd = torch.is_autocast_enabled()
        if preserve_rng_state:
            ctx.fwd_cpu_state = torch.get_rng_state()
            # Don't eagerly initialize the cuda context by accident.
            # (If the user intends that the context is initialized later, within their
            # run_function, we SHOULD actually stash the cuda state here.  Unfortunately,

            

Reported by Pylint.

Parameters differ from overridden 'forward' method
Error

Line: 58 Column: 5

              class CheckpointFunction(torch.autograd.Function):

    @staticmethod
    def forward(ctx, run_function, preserve_rng_state, *args):
        check_backward_validity(args)
        ctx.run_function = run_function
        ctx.preserve_rng_state = preserve_rng_state
        ctx.had_autocast_in_fwd = torch.is_autocast_enabled()
        if preserve_rng_state:

            

Reported by Pylint.

Access to a protected member _initialized of a client class
Error

Line: 70 Column: 16

                          # run_function, we SHOULD actually stash the cuda state here.  Unfortunately,
            # we have no way to anticipate this will happen before we run the function.)
            ctx.had_cuda_in_fwd = False
            if torch.cuda._initialized:
                ctx.had_cuda_in_fwd = True
                ctx.fwd_gpu_devices, ctx.fwd_gpu_states = get_device_states(*args)

        # Save non-tensor inputs in ctx, keep a placeholder None for tensors
        # to be filled out during the backward.

            

Reported by Pylint.

Access to a protected member _is_checkpoint_valid of a client class
Error

Line: 95 Column: 16

              
    @staticmethod
    def backward(ctx, *args):
        if not torch.autograd._is_checkpoint_valid():
            raise RuntimeError(
                "Checkpointing is not compatible with .grad() or when an `inputs` parameter"
                " is passed to .backward(). Please use .backward() and do not pass its `inputs`"
                " argument.")
        # Copy the list to avoid modifying original list.

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 214 Column: 48

                  return CheckpointFunction.apply(function, preserve, *args)


def checkpoint_sequential(functions, segments, input, **kwargs):
    r"""A helper function for checkpointing sequential models.

    Sequential models execute a list of modules/functions in order
    (sequentially). Therefore, we can divide such a model in various segments
    and checkpoint each segment. All segments except the last will run in

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 261 Column: 21

                      raise ValueError("Unexpected keyword arguments: " + ",".join(arg for arg in kwargs))

    def run_function(start, end, functions):
        def forward(input):
            for j in range(start, end + 1):
                input = functions[j](input)
            return input
        return forward


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import warnings
from typing import Any, Iterable, List, Tuple


def detach_variable(inputs: Tuple[Any, ...]) -> Tuple[torch.Tensor, ...]:
    if isinstance(inputs, tuple):
        out = []
        for inp in inputs:

            

Reported by Pylint.

standard import "import warnings" should be placed before "import torch"
Error

Line: 2 Column: 1

              import torch
import warnings
from typing import Any, Iterable, List, Tuple


def detach_variable(inputs: Tuple[Any, ...]) -> Tuple[torch.Tensor, ...]:
    if isinstance(inputs, tuple):
        out = []
        for inp in inputs:

            

Reported by Pylint.

standard import "from typing import Any, Iterable, List, Tuple" should be placed before "import torch"
Error

Line: 3 Column: 1

              import torch
import warnings
from typing import Any, Iterable, List, Tuple


def detach_variable(inputs: Tuple[Any, ...]) -> Tuple[torch.Tensor, ...]:
    if isinstance(inputs, tuple):
        out = []
        for inp in inputs:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 6 Column: 1

              from typing import Any, Iterable, List, Tuple


def detach_variable(inputs: Tuple[Any, ...]) -> Tuple[torch.Tensor, ...]:
    if isinstance(inputs, tuple):
        out = []
        for inp in inputs:
            if not isinstance(inp, torch.Tensor):
                out.append(inp)

            

Reported by Pylint.

torch/random.py
17 issues
Access to a protected member _C of a client class
Error

Line: 25 Column: 26

                  return default_generator.get_state()


def manual_seed(seed) -> torch._C.Generator:
    r"""Sets the seed for generating random numbers. Returns a
    `torch.Generator` object.

    Args:
        seed (int): The desired seed. Value must be within the inclusive range

            

Reported by Pylint.

Redefining name 'seed' from outer scope (line 44)
Error

Line: 25 Column: 17

                  return default_generator.get_state()


def manual_seed(seed) -> torch._C.Generator:
    r"""Sets the seed for generating random numbers. Returns a
    `torch.Generator` object.

    Args:
        seed (int): The desired seed. Value must be within the inclusive range

            

Reported by Pylint.

Redefining name 'torch' from outer scope (line 5)
Error

Line: 36 Column: 5

                          `0xffff_ffff_ffff_ffff + seed`.
    """
    seed = int(seed)
    import torch.cuda

    if not torch.cuda._is_in_bad_fork():
        torch.cuda.manual_seed_all(seed)

    return default_generator.manual_seed(seed)

            

Reported by Pylint.

Access to a protected member _is_in_bad_fork of a client class
Error

Line: 38 Column: 12

                  seed = int(seed)
    import torch.cuda

    if not torch.cuda._is_in_bad_fork():
        torch.cuda.manual_seed_all(seed)

    return default_generator.manual_seed(seed)



            

Reported by Pylint.

Redefining name 'seed' from outer scope (line 44)
Error

Line: 48 Column: 5

                  r"""Sets the seed for generating random numbers to a non-deterministic
    random number. Returns a 64 bit number used to seed the RNG.
    """
    seed = default_generator.seed()
    import torch.cuda

    if not torch.cuda._is_in_bad_fork():
        torch.cuda.manual_seed_all(seed)


            

Reported by Pylint.

Redefining name 'torch' from outer scope (line 5)
Error

Line: 49 Column: 5

                  random number. Returns a 64 bit number used to seed the RNG.
    """
    seed = default_generator.seed()
    import torch.cuda

    if not torch.cuda._is_in_bad_fork():
        torch.cuda.manual_seed_all(seed)

    return seed

            

Reported by Pylint.

Access to a protected member _is_in_bad_fork of a client class
Error

Line: 51 Column: 12

                  seed = default_generator.seed()
    import torch.cuda

    if not torch.cuda._is_in_bad_fork():
        torch.cuda.manual_seed_all(seed)

    return seed



            

Reported by Pylint.

Redefining name 'torch' from outer scope (line 5)
Error

Line: 84 Column: 5

                          to delete it and unindent your Python code under it.
    """

    import torch.cuda
    global _fork_rng_warned_already

    # Internal arguments:
    #   _caller: the function which called fork_rng, which the user used
    #   _devices_kw: the devices keyword of _caller

            

Reported by Pylint.

Using the global statement
Error

Line: 85 Column: 5

                  """

    import torch.cuda
    global _fork_rng_warned_already

    # Internal arguments:
    #   _caller: the function which called fork_rng, which the user used
    #   _devices_kw: the devices keyword of _caller


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import contextlib
import warnings

from torch._C import default_generator
import torch


def set_rng_state(new_state: torch.Tensor) -> None:
    r"""Sets the random number generator state.

            

Reported by Pylint.

torch/nn/quantized/modules/activation.py
17 issues
Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 102 Column: 39

                               inplace: bool = False, device=None, dtype=None) -> None:
        factory_kwargs = {'device': device, 'dtype': dtype}
        super().__init__(negative_slope, inplace)
        self.register_buffer('scale', torch.tensor(scale, **factory_kwargs))
        self.register_buffer('zero_point', torch.tensor(zero_point, **factory_kwargs))

    def forward(self, input):
        return torch.ops.quantized.leaky_relu(
            input, self.negative_slope, self.inplace, self.scale, self.zero_point)

            

Reported by Pylint.

Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 103 Column: 44

                      factory_kwargs = {'device': device, 'dtype': dtype}
        super().__init__(negative_slope, inplace)
        self.register_buffer('scale', torch.tensor(scale, **factory_kwargs))
        self.register_buffer('zero_point', torch.tensor(zero_point, **factory_kwargs))

    def forward(self, input):
        return torch.ops.quantized.leaky_relu(
            input, self.negative_slope, self.inplace, self.scale, self.zero_point)


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 31 Column: 23

                      super(ReLU6, self).__init__(inplace)
        self.inplace = inplace

    def forward(self, input):
        return torch.ops.quantized.relu6(input, self.inplace)

    def _get_name(self):
        return 'QuantizedReLU6'


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 53 Column: 23

                      self.scale = scale
        self.zero_point = zero_point

    def forward(self, input):
        return torch.nn.quantized.functional.hardswish(
            input, scale=self.scale, zero_point=self.zero_point)

    def _get_name(self):
        return 'QuantizedHardswish'

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 78 Column: 23

                      self.scale = scale
        self.zero_point = zero_point

    def forward(self, input):
        return torch.nn.quantized.functional.elu(
            input, self.scale, self.zero_point, self.alpha)

    def _get_name(self):
        return 'QuantizedELU'

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 105 Column: 23

                      self.register_buffer('scale', torch.tensor(scale, **factory_kwargs))
        self.register_buffer('zero_point', torch.tensor(zero_point, **factory_kwargs))

    def forward(self, input):
        return torch.ops.quantized.leaky_relu(
            input, self.negative_slope, self.inplace, self.scale, self.zero_point)

    def _get_name(self):
        return 'QuantizedLeakyReLU'

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 130 Column: 23

                      self.output_scale = output_scale
        self.output_zero_point = output_zero_point

    def forward(self, input):
        return torch.ops.quantized.sigmoid(input, self.output_scale, self.output_zero_point)

    @classmethod
    def from_float(cls, mod):
        output_scale, output_zero_point = mod.activation_post_process.calculate_qparams()

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import torch.nn.quantized.functional

class ReLU6(torch.nn.ReLU):
    r"""Applies the element-wise function:

    :math:`\text{ReLU6}(x) = \min(\max(x_0, x), q(6))`, where :math:`x_0` is the
    zero_point, and :math:`q(6)` is the quantized representation of number 6.


            

Reported by Pylint.

Consider using Python 3 style super() without arguments
Error

Line: 28 Column: 9

                      >>> output = m(input)
    """
    def __init__(self, inplace=False):
        super(ReLU6, self).__init__(inplace)
        self.inplace = inplace

    def forward(self, input):
        return torch.ops.quantized.relu6(input, self.inplace)


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 38 Column: 5

                      return 'QuantizedReLU6'

    @staticmethod
    def from_float(mod):
        return ReLU6(mod.inplace)

class Hardswish(torch.nn.Hardswish):
    r"""This is the quantized version of :class:`~torch.nn.Hardswish`.


            

Reported by Pylint.

torch/testing/_internal/opinfo_helper.py
17 issues
Module 'torch' has no 'bool' member
Error

Line: 58 Column: 44

                      return _dynamic_dispatch_dtypes(())

    supported_dtypes = set()
    for dtype in all_types_and_complex_and(torch.bool, torch.bfloat16, torch.half):
        try:
            samples = sample_inputs_fn(op, device_type, dtype, False)
        except RuntimeError:
            # If `sample_inputs_fn` doesn't support sampling for a given
            # `dtype`, we assume that the `dtype` is not supported.

            

Reported by Pylint.

Module 'torch' has no 'bfloat16' member
Error

Line: 58 Column: 56

                      return _dynamic_dispatch_dtypes(())

    supported_dtypes = set()
    for dtype in all_types_and_complex_and(torch.bool, torch.bfloat16, torch.half):
        try:
            samples = sample_inputs_fn(op, device_type, dtype, False)
        except RuntimeError:
            # If `sample_inputs_fn` doesn't support sampling for a given
            # `dtype`, we assume that the `dtype` is not supported.

            

Reported by Pylint.

Module 'torch' has no 'half' member
Error

Line: 58 Column: 72

                      return _dynamic_dispatch_dtypes(())

    supported_dtypes = set()
    for dtype in all_types_and_complex_and(torch.bool, torch.bfloat16, torch.half):
        try:
            samples = sample_inputs_fn(op, device_type, dtype, False)
        except RuntimeError:
            # If `sample_inputs_fn` doesn't support sampling for a given
            # `dtype`, we assume that the `dtype` is not supported.

            

Reported by Pylint.

Unused variable 're'
Error

Line: 75 Column: 13

                      for sample in samples:
            try:
                op(sample.input, *sample.args, **sample.kwargs)
            except RuntimeError as re:
                # dtype is not supported
                supported = False
                break

        if supported:

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import collections
import warnings
from functools import partial

import torch
from torch.testing._internal.common_cuda import (TEST_CUDA)
from torch.testing._core import _dispatch_dtypes
from torch.testing import (all_types_and_complex_and,
                           all_types_and_complex,

            

Reported by Pylint.

Class name "_dynamic_dispatch_dtypes" doesn't conform to PascalCase naming style
Error

Line: 45 Column: 1

              # Better way to acquire devices?
DEVICES = ['cpu'] + (['cuda'] if TEST_CUDA else [])

class _dynamic_dispatch_dtypes(_dispatch_dtypes):
    # Class to tag the dynamically generated types.
    pass


def get_supported_dtypes(op, sample_inputs_fn, device_type):

            

Reported by Pylint.

Argument name "op" doesn't conform to snake_case naming style
Error

Line: 50 Column: 1

                  pass


def get_supported_dtypes(op, sample_inputs_fn, device_type):
    # Returns the supported dtypes for the given operator and device_type pair.
    assert device_type in ['cpu', 'cuda']
    if not TEST_CUDA and device_type == 'cuda':
        warnings.warn("WARNING: CUDA is not available, empty_dtypes dispatch will be returned!")
        return _dynamic_dispatch_dtypes(())

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 50 Column: 1

                  pass


def get_supported_dtypes(op, sample_inputs_fn, device_type):
    # Returns the supported dtypes for the given operator and device_type pair.
    assert device_type in ['cpu', 'cuda']
    if not TEST_CUDA and device_type == 'cuda':
        warnings.warn("WARNING: CUDA is not available, empty_dtypes dispatch will be returned!")
        return _dynamic_dispatch_dtypes(())

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 52
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

              
def get_supported_dtypes(op, sample_inputs_fn, device_type):
    # Returns the supported dtypes for the given operator and device_type pair.
    assert device_type in ['cpu', 'cuda']
    if not TEST_CUDA and device_type == 'cuda':
        warnings.warn("WARNING: CUDA is not available, empty_dtypes dispatch will be returned!")
        return _dynamic_dispatch_dtypes(())

    supported_dtypes = set()

            

Reported by Bandit.

Line too long (107/100)
Error

Line: 66 Column: 1

                          # `dtype`, we assume that the `dtype` is not supported.
            # We raise a warning, so that user knows that this was the case
            # and can investigate if there was an issue with the `sample_inputs_fn`.
            warnings.warn(f"WARNING: Unable to generate sample for device:{device_type} and dtype:{dtype}")
            continue

        # We assume the dtype is supported
        # only if all samples pass for the given dtype.
        supported = True

            

Reported by Pylint.

torch/utils/hipify/cuda_to_hip_mappings.py
17 issues
Attempted relative import beyond top-level package
Error

Line: 3 Column: 1

              import collections

from .constants import (API_BLAS, API_C10, API_CAFFE2, API_DRIVER, API_FFT,
                        API_PYTORCH, API_RAND, API_ROCTX, API_RTC, API_RUNTIME,
                        API_SPARSE, CONV_CACHE, CONV_CONTEXT, CONV_D3D9,
                        CONV_D3D10, CONV_D3D11, CONV_DEF, CONV_DEVICE,
                        CONV_DEVICE_FUNC, CONV_EGL, CONV_ERROR, CONV_EVENT,
                        CONV_EXEC, CONV_GL, CONV_GRAPHICS, CONV_INCLUDE,
                        CONV_INCLUDE_CUDA_MAIN_H, CONV_INIT, CONV_JIT,

            

Reported by Pylint.

String statement has no effect
Error

Line: 16 Column: 1

                                      CONV_SURFACE, CONV_TEX, CONV_THREAD, CONV_TYPE,
                        CONV_VDPAU, CONV_VERSION, HIP_UNSUPPORTED)

""" Mapping of CUDA functions, include files, constants, and types to ROCm/HIP equivalents
This closely follows the implementation in hipify-clang
https://github.com/ROCm-Developer-Tools/HIP/blob/master/hipify-clang/src/CUDA2HipMap.cpp
and its structure.
There are different maps for fundamental names, include files, identifies, sparse, and
PyTorch specific translations.

            

Reported by Pylint.

TODO: Undo this special-case; see the header for motivation behind this
Error

Line: 7993 Column: 3

                          "setCurrentCUDAStream",
            ("setCurrentHIPStreamMasqueradingAsCUDA", API_PYTORCH),
        ),
        # TODO: Undo this special-case; see the header for motivation behind this
        # hack.  It's VERY important this is only applied to PyTorch HIPify.
        (
            "c10/cuda/CUDAGuard.h",
            ("ATen/hip/impl/HIPGuardImplMasqueradingAsCUDA.h", API_PYTORCH),
        ),

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import collections

from .constants import (API_BLAS, API_C10, API_CAFFE2, API_DRIVER, API_FFT,
                        API_PYTORCH, API_RAND, API_ROCTX, API_RTC, API_RUNTIME,
                        API_SPARSE, CONV_CACHE, CONV_CONTEXT, CONV_D3D9,
                        CONV_D3D10, CONV_D3D11, CONV_DEF, CONV_DEVICE,
                        CONV_DEVICE_FUNC, CONV_EGL, CONV_ERROR, CONV_EVENT,
                        CONV_EXEC, CONV_GL, CONV_GRAPHICS, CONV_INCLUDE,
                        CONV_INCLUDE_CUDA_MAIN_H, CONV_INIT, CONV_JIT,

            

Reported by Pylint.

Too many lines in module (8173/1000)
Error

Line: 1 Column: 1

              import collections

from .constants import (API_BLAS, API_C10, API_CAFFE2, API_DRIVER, API_FFT,
                        API_PYTORCH, API_RAND, API_ROCTX, API_RTC, API_RUNTIME,
                        API_SPARSE, CONV_CACHE, CONV_CONTEXT, CONV_D3D9,
                        CONV_D3D10, CONV_D3D11, CONV_DEF, CONV_DEVICE,
                        CONV_DEVICE_FUNC, CONV_EGL, CONV_ERROR, CONV_EVENT,
                        CONV_EXEC, CONV_GL, CONV_GRAPHICS, CONV_INCLUDE,
                        CONV_INCLUDE_CUDA_MAIN_H, CONV_INIT, CONV_JIT,

            

Reported by Pylint.

Line too long (105/100)
Error

Line: 4057 Column: 1

                          "cudaHostRegisterIoMemory",
            ("hipHostRegisterIoMemory", CONV_MEM, API_RUNTIME),
        ),
        # ("warpSize", ("hipWarpSize", CONV_SPECIAL_FUNC, API_RUNTIME), (HIP actually uses warpSize...)),
        ("cudaEventCreate", ("hipEventCreate", CONV_EVENT, API_RUNTIME)),
        (
            "cudaEventCreateWithFlags",
            ("hipEventCreateWithFlags", CONV_EVENT, API_RUNTIME),
        ),

            

Reported by Pylint.

Line too long (120/100)
Error

Line: 7733 Column: 1

                      ("cub::", ("hipcub::", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::ArgMax", ("hipcub::ArgMax", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::ArgMin", ("hipcub::ArgMin", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::BLOCK_REDUCE_WARP_REDUCTIONS", ("hipcub::BLOCK_REDUCE_WARP_REDUCTIONS", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::BlockReduce", ("hipcub::BlockReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CachingDeviceAllocator", ("hipcub::CachingDeviceAllocator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CountingInputIterator", ("hipcub::CountingInputIterator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRadixSort", ("hipcub::DeviceRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceReduce", ("hipcub::DeviceReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),

            

Reported by Pylint.

Line too long (108/100)
Error

Line: 7735 Column: 1

                      ("cub::ArgMin", ("hipcub::ArgMin", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::BLOCK_REDUCE_WARP_REDUCTIONS", ("hipcub::BLOCK_REDUCE_WARP_REDUCTIONS", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::BlockReduce", ("hipcub::BlockReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CachingDeviceAllocator", ("hipcub::CachingDeviceAllocator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CountingInputIterator", ("hipcub::CountingInputIterator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRadixSort", ("hipcub::DeviceRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceReduce", ("hipcub::DeviceReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRunLengthEncode", ("hipcub::DeviceRunLengthEncode", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceScan", ("hipcub::DeviceScan", CONV_SPECIAL_FUNC, API_RUNTIME)),

            

Reported by Pylint.

Line too long (106/100)
Error

Line: 7736 Column: 1

                      ("cub::BLOCK_REDUCE_WARP_REDUCTIONS", ("hipcub::BLOCK_REDUCE_WARP_REDUCTIONS", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::BlockReduce", ("hipcub::BlockReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CachingDeviceAllocator", ("hipcub::CachingDeviceAllocator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::CountingInputIterator", ("hipcub::CountingInputIterator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRadixSort", ("hipcub::DeviceRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceReduce", ("hipcub::DeviceReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRunLengthEncode", ("hipcub::DeviceRunLengthEncode", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceScan", ("hipcub::DeviceScan", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceSegmentedRadixSort", ("hipcub::DeviceSegmentedRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),

            

Reported by Pylint.

Line too long (106/100)
Error

Line: 7739 Column: 1

                      ("cub::CountingInputIterator", ("hipcub::CountingInputIterator", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRadixSort", ("hipcub::DeviceRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceReduce", ("hipcub::DeviceReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceRunLengthEncode", ("hipcub::DeviceRunLengthEncode", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceScan", ("hipcub::DeviceScan", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceSegmentedRadixSort", ("hipcub::DeviceSegmentedRadixSort", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceSegmentedReduce", ("hipcub::DeviceSegmentedReduce", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::DeviceSelect", ("hipcub::DeviceSelect", CONV_SPECIAL_FUNC, API_RUNTIME)),
        ("cub::KeyValuePair", ("hipcub::KeyValuePair", CONV_SPECIAL_FUNC, API_RUNTIME)),

            

Reported by Pylint.

torch/fx/experimental/merge_matmul.py
17 issues
Module 'torch' has no 'matmul' member
Error

Line: 157 Column: 61

                  # Populate rhs_users and lhs_users - maps from LHS/RHS matrix multiply operands to
    # the matmul of which they are the LHS/RHS.
    for node in gm.graph.nodes:
        if node.op != "call_function" or node.target is not torch.matmul:
            continue

        lhs, rhs = node.args

        # TODO: Properly handle aliasing caused by get_attr. For now,

            

Reported by Pylint.

Module 'torch' has no 'cat' member
Error

Line: 189 Column: 47

                      rhs = gm.graph.get_attr(rhs) if isinstance(rhs, str) else rhs

        # Concatenate all the LHS operands.
        merge_mm_cat = gm.graph.call_function(torch.cat, (lhs,), {})

        # Multiply the concatenated LHS operands with the one RHS. This will produce
        # the same results as all the individual matmuls involving rhs in the original graph,
        # but they will all be concatenated together.
        merge_mm = gm.graph.call_function(torch.matmul, (merge_mm_cat, rhs,), {})

            

Reported by Pylint.

Module 'torch' has no 'matmul' member
Error

Line: 194 Column: 43

                      # Multiply the concatenated LHS operands with the one RHS. This will produce
        # the same results as all the individual matmuls involving rhs in the original graph,
        # but they will all be concatenated together.
        merge_mm = gm.graph.call_function(torch.matmul, (merge_mm_cat, rhs,), {})

        # Split the result of the merged matmul using the shapes of the LHS operands
        # to ascertain how large each chunk should be.
        merge_mm_sizes = [
            gm.graph.call_function(get_first_dim, (l,), {}) for l in lhs

            

Reported by Pylint.

TODO: Properly handle aliasing caused by get_attr. For now,
Error

Line: 162 Column: 3

              
        lhs, rhs = node.args

        # TODO: Properly handle aliasing caused by get_attr. For now,
        # use the attribute name as the operand if the node is a
        # get_attr.
        lhs = lhs.target if lhs.op == "get_attr" else lhs
        rhs = rhs.target if rhs.op == "get_attr" else rhs


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch

from torch.fx.graph import Graph
from torch.fx.graph_module import GraphModule
from torch.fx.node import Node
from torch.fx._symbolic_trace import symbolic_trace

import itertools
import operator

            

Reported by Pylint.

standard import "import itertools" should be placed before "import torch"
Error

Line: 8 Column: 1

              from torch.fx.node import Node
from torch.fx._symbolic_trace import symbolic_trace

import itertools
import operator

from typing import Dict, List



            

Reported by Pylint.

standard import "import operator" should be placed before "import torch"
Error

Line: 9 Column: 1

              from torch.fx._symbolic_trace import symbolic_trace

import itertools
import operator

from typing import Dict, List


def get_first_dim(t: torch.Tensor) -> int:

            

Reported by Pylint.

standard import "from typing import Dict, List" should be placed before "import torch"
Error

Line: 11 Column: 1

              import itertools
import operator

from typing import Dict, List


def get_first_dim(t: torch.Tensor) -> int:
    """
    A free function primarily for use in the merge_matmul graph transformation below

            

Reported by Pylint.

Argument name "t" doesn't conform to snake_case naming style
Error

Line: 14 Column: 1

              from typing import Dict, List


def get_first_dim(t: torch.Tensor) -> int:
    """
    A free function primarily for use in the merge_matmul graph transformation below
    that returns the first dimension of a Tensor. This is necessary because torch.Tensor.shape
    is an attribute (and cannot be the target of a call_function node) and also helps save
    a getitem op in the graph.

            

Reported by Pylint.

Argument name "gm" doesn't conform to snake_case naming style
Error

Line: 30 Column: 1

                  return t.shape[0]


def legalize_graph(gm: GraphModule):
    """
    Replace the graph of the given GraphModule with one that contains the same nodes as the
    original, but in topologically sorted order.

    This is used by the merge_matmul transformation below, which disturbs the topologically sorted

            

Reported by Pylint.

torch/quantization/fuse_modules.py
17 issues
Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

              
import torch.nn as nn

from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

from typing import List, Optional

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              
from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

from typing import List, Optional

# Generalization of getattr

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 9 Column: 1

              from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

from typing import List, Optional

# Generalization of getattr
def _get_module(model, submodule_key):

            

Reported by Pylint.

Unused fuse_conv_bn imported from fuser_method_mappings
Error

Line: 8 Column: 1

              
from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

from typing import List, Optional

# Generalization of getattr

            

Reported by Pylint.

Unused fuse_conv_bn_relu imported from fuser_method_mappings
Error

Line: 9 Column: 1

              from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

from typing import List, Optional

# Generalization of getattr
def _get_module(model, submodule_key):

            

Reported by Pylint.

Access to a protected member _forward_pre_hooks of a client class
Error

Line: 52 Column: 35

                  fused = fuser_method(*mod_list)
    # NOTE: forward hooks not processed in the two following for loops will be lost after the fusion
    # Move pre forward hooks of the base module to resulting fused module
    for handle_id, pre_hook_fn in mod_list[0]._forward_pre_hooks.items():
        fused.register_forward_pre_hook(pre_hook_fn)
        del mod_list[0]._forward_pre_hooks[handle_id]
    # Move post forward hooks of the last module to resulting fused module
    for handle_id, hook_fn in mod_list[-1]._forward_hooks.items():
        fused.register_forward_hook(hook_fn)

            

Reported by Pylint.

Access to a protected member _forward_pre_hooks of a client class
Error

Line: 54 Column: 13

                  # Move pre forward hooks of the base module to resulting fused module
    for handle_id, pre_hook_fn in mod_list[0]._forward_pre_hooks.items():
        fused.register_forward_pre_hook(pre_hook_fn)
        del mod_list[0]._forward_pre_hooks[handle_id]
    # Move post forward hooks of the last module to resulting fused module
    for handle_id, hook_fn in mod_list[-1]._forward_hooks.items():
        fused.register_forward_hook(hook_fn)
        del mod_list[-1]._forward_hooks[handle_id]
    new_mod[0] = fused

            

Reported by Pylint.

Access to a protected member _forward_hooks of a client class
Error

Line: 56 Column: 31

                      fused.register_forward_pre_hook(pre_hook_fn)
        del mod_list[0]._forward_pre_hooks[handle_id]
    # Move post forward hooks of the last module to resulting fused module
    for handle_id, hook_fn in mod_list[-1]._forward_hooks.items():
        fused.register_forward_hook(hook_fn)
        del mod_list[-1]._forward_hooks[handle_id]
    new_mod[0] = fused

    for i in range(1, len(mod_list)):

            

Reported by Pylint.

Access to a protected member _forward_hooks of a client class
Error

Line: 58 Column: 13

                  # Move post forward hooks of the last module to resulting fused module
    for handle_id, hook_fn in mod_list[-1]._forward_hooks.items():
        fused.register_forward_hook(hook_fn)
        del mod_list[-1]._forward_hooks[handle_id]
    new_mod[0] = fused

    for i in range(1, len(mod_list)):
        identity = nn.Identity()
        identity.training = mod_list[0].training

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              
import copy

import torch.nn as nn

from .fuser_method_mappings import get_fuser_method
# for backward compatibility
from .fuser_method_mappings import fuse_conv_bn  # noqa: F401
from .fuser_method_mappings import fuse_conv_bn_relu  # noqa: F401

            

Reported by Pylint.