The following issues were found:

torch/nn/utils/convert_parameters.py
3 issues
Module 'torch' has no 'cat' member
Error

Line: 24 Column: 12

        param_device = _check_param_device(param, param_device)

        vec.append(param.view(-1))
    return torch.cat(vec)


def vector_to_parameters(vec: torch.Tensor, parameters: Iterable[torch.Tensor]) -> None:
    r"""Convert one vector to the parameters

Reported by Pylint.
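
This is a well-known Pylint false positive: `torch` builds many of its members (including `torch.cat`) dynamically at import time, so static analysis cannot see them. Assuming the project keeps a shared Pylint configuration, the usual remedy is to whitelist the dynamic members there, or to suppress the check at the call site, as sketched below.

# Sketch of a call-site suppression; torch.cat exists at runtime even
# though Pylint's static analysis cannot resolve it.
return torch.cat(vec)  # pylint: disable=no-member

Adding `generated-members=torch.*` to the `[TYPECHECK]` section of the rcfile silences this whole class of torch no-member reports at once.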

Missing module docstring
Error

Line: 1 Column: 1

import torch
from typing import Iterable, Optional


def parameters_to_vector(parameters: Iterable[torch.Tensor]) -> torch.Tensor:
    r"""Convert parameters to one vector

    Args:
        parameters (Iterable[Tensor]): an iterator of Tensors that are the

Reported by Pylint.
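
C0114 only asks for a one-line summary at the top of the file. A minimal sketch follows (the wording is illustrative, not taken from the repository); the same one-line fix applies to every file flagged with this message below.

"""Utilities for converting module parameters to a single vector and back."""
import torch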

standard import "from typing import Iterable, Optional" should be placed before "import torch"
Error

Line: 2 Column: 1

import torch
from typing import Iterable, Optional


def parameters_to_vector(parameters: Iterable[torch.Tensor]) -> torch.Tensor:
    r"""Convert parameters to one vector

    Args:
        parameters (Iterable[Tensor]): an iterator of Tensors that are the

Reported by Pylint.
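
PEP 8 and Pylint's C0411 want standard-library imports ahead of third-party ones. The fix is a straight swap, conventionally with a blank line between the groups:

from typing import Iterable, Optional  # standard library first

import torch  # third-party after a blank line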

torch/nn/parallel/scatter_gather.py
3 issues
Attempted relative import beyond top-level package
Error

Line: 2 Column: 1

import torch
from ._functions import Scatter, Gather

def is_namedtuple(obj):
    # Check if type was created from collections.namedtuple or a typing.NamedTuple.
    return (
        isinstance(obj, tuple) and hasattr(obj, "_asdict") and hasattr(obj, "_fields")
    )

Reported by Pylint.
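
E0402 usually means Pylint was pointed at a single file rather than the package, so it treats `scatter_gather.py` as a top-level module and the relative import appears to escape it. Running the linter against the package root (e.g. `pylint torch`) resolves this; alternatively the import can be spelled absolutely, as sketched here:

# Absolute form of the same import; it resolves no matter how the
# linter is invoked, at the cost of hard-coding the package path.
from torch.nn.parallel._functions import Scatter, Gather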

Missing module docstring
Error

Line: 1 Column: 1

import torch
from ._functions import Scatter, Gather

def is_namedtuple(obj):
    # Check if type was created from collections.namedtuple or a typing.NamedTuple.
    return (
        isinstance(obj, tuple) and hasattr(obj, "_asdict") and hasattr(obj, "_fields")
    )

Reported by Pylint.

Missing function or method docstring
Error

Line: 4 Column: 1

import torch
from ._functions import Scatter, Gather

def is_namedtuple(obj):
    # Check if type was created from collections.namedtuple or a typing.NamedTuple.
    return (
        isinstance(obj, tuple) and hasattr(obj, "_asdict") and hasattr(obj, "_fields")
    )

Reported by Pylint.
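
The existing inline comment already says what the function does; promoting it to a docstring satisfies C0116. A sketch:

def is_namedtuple(obj):
    """Check whether obj was created by collections.namedtuple or typing.NamedTuple."""
    return (
        isinstance(obj, tuple) and hasattr(obj, "_asdict") and hasattr(obj, "_fields")
    )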

torch/fx/experimental/refinement_types.py
3 issues
Missing module docstring
Error

Line: 1 Column: 1

class Equality:
    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs

    def __str__(self):
        return f'{self.lhs} = {self.rhs}'

    def __repr__(self):

Reported by Pylint.

Missing class docstring
Error

Line: 1 Column: 1

class Equality:
    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs

    def __str__(self):
        return f'{self.lhs} = {self.rhs}'

    def __repr__(self):

Reported by Pylint.
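
As with the module docstring, a one-line summary is enough; the wording below is an illustrative guess at the class's intent, not text from the repository:

class Equality:
    """An equality constraint between two expressions, rendered as `lhs = rhs`."""

    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs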

Unnecessary "else" after "return"
Error

Line: 13 Column: 9

        return f'{self.lhs} = {self.rhs}'

    def __eq__(self, other):
        if isinstance(other, Equality):
            return self.lhs == other.lhs and self.rhs == other.rhs
        else:
            return False

Reported by Pylint.
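
R1705 points out that the `else` is redundant once the `if` branch returns; dropping it flattens the method without changing behaviour:

    def __eq__(self, other):
        if isinstance(other, Equality):
            return self.lhs == other.lhs and self.rhs == other.rhs
        # No else needed: reaching this line means the isinstance test failed.
        return False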

torch/fx/passes/operator_support.py
3 issues
Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

import torch
import torch.fx

from .tools_common import get_node_target, CALLABLE_NODE_OPS


class OperatorSupport:
    """
    `_support_dict` maps node.target to supported inputs dtypes.

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

from typing import Dict

import torch
import torch.fx

from .tools_common import get_node_target, CALLABLE_NODE_OPS


class OperatorSupport:

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 9 Column: 1

from .tools_common import get_node_target, CALLABLE_NODE_OPS


class OperatorSupport:
    """
    `_support_dict` maps node.target to supported inputs dtypes.

    node.target is retrived using helper function `get_node_target()`

Reported by Pylint.
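
R0903 is a design heuristic rather than a defect; a support-checker base class with one public method can be perfectly deliberate. Assuming the design stays as-is, a targeted suppression is the usual response:

# The single public method is intentional here; suppress the heuristic
# on the class line and leave the body unchanged.
class OperatorSupport:  # pylint: disable=too-few-public-methods
    ...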

torch/nn/quantized/_reference/modules/__init__.py
3 issues
Unable to import '__init__.linear'
Error

Line: 1 Column: 1

from .linear import Linear
from .conv import Conv1d, Conv2d, Conv3d

__all__ = [
    'Linear',
    'Conv1d',
    'Conv2d',
    'Conv3d',
]

Reported by Pylint.

Unable to import '__init__.conv'
Error

Line: 2 Column: 1

from .linear import Linear
from .conv import Conv1d, Conv2d, Conv3d

__all__ = [
    'Linear',
    'Conv1d',
    'Conv2d',
    'Conv3d',
]

Reported by Pylint.
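
Both E0401 reports carry the telltale module name `__init__`: the file was linted standalone, so Pylint cannot place it inside its package, and the relative imports fail to resolve even though they are fine at runtime. Linting the package root avoids this; an absolute spelling, sketched below under that assumption, also sidesteps it:

# Absolute equivalents of the two flagged relative imports.
from torch.nn.quantized._reference.modules.linear import Linear
from torch.nn.quantized._reference.modules.conv import Conv1d, Conv2d, Conv3d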

Missing module docstring
Error

Line: 1 Column: 1

from .linear import Linear
from .conv import Conv1d, Conv2d, Conv3d

__all__ = [
    'Linear',
    'Conv1d',
    'Conv2d',
    'Conv3d',
]

Reported by Pylint.

torch/nn/quantized/dynamic/modules/__init__.py
3 issues
Unable to import '__init__.linear'
Error

Line: 2 Column: 1

from .linear import Linear
from .rnn import LSTM, GRU, LSTMCell, RNNCell, GRUCell

__all__ = [
    'Linear',
    'LSTM',
    'GRU',
    'LSTMCell',

Reported by Pylint.

Unable to import '__init__.rnn'
Error

Line: 3 Column: 1

from .linear import Linear
from .rnn import LSTM, GRU, LSTMCell, RNNCell, GRUCell

__all__ = [
    'Linear',
    'LSTM',
    'GRU',
    'LSTMCell',

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

from .linear import Linear
from .rnn import LSTM, GRU, LSTMCell, RNNCell, GRUCell

__all__ = [
    'Linear',
    'LSTM',
    'GRU',
    'LSTMCell',

Reported by Pylint.

torch/utils/data/datapipes/iter/listdirfiles.py
3 issues
Missing module docstring
Error

Line: 1 Column: 1

from torch.utils.data import IterDataPipe
from torch.utils.data.datapipes.utils.common import get_file_pathnames_from_root
from typing import List, Union, Iterator

class ListDirFilesIterDataPipe(IterDataPipe[str]):
    r""" :class:`ListDirFilesIterDataPipe`

    Iterable DataPipe to load file pathname(s) (path + filename), yield pathname from given disk root dir.
    args:

Reported by Pylint.

standard import "from typing import List, Union, Iterator" should be placed before "from torch.utils.data import IterDataPipe"
Error

Line: 3 Column: 1

from torch.utils.data import IterDataPipe
from torch.utils.data.datapipes.utils.common import get_file_pathnames_from_root
from typing import List, Union, Iterator

class ListDirFilesIterDataPipe(IterDataPipe[str]):
    r""" :class:`ListDirFilesIterDataPipe`

    Iterable DataPipe to load file pathname(s) (path + filename), yield pathname from given disk root dir.
    args:

Reported by Pylint.

Line too long (106/100)
Error

Line: 8 Column: 1

class ListDirFilesIterDataPipe(IterDataPipe[str]):
    r""" :class:`ListDirFilesIterDataPipe`

    Iterable DataPipe to load file pathname(s) (path + filename), yield pathname from given disk root dir.
    args:
        root : root dir
        mask : a unix style filter string or string list for filtering file name(s)
        abspath : whether to return relative pathname or absolute pathname
        length : a nominal length of the datapipe

Reported by Pylint.
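
C0301 just needs the 106-character docstring line wrapped below the 100-column limit, e.g.:

    Iterable DataPipe to load file pathname(s) (path + filename), yielding
    pathnames from the given disk root dir.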

torch/nn/utils/init.py
3 issues
Missing module docstring
Error

Line: 1 Column: 1

import inspect
import torch


def skip_init(module_cls, *args, **kwargs):
    r"""
    Given a module class object and args / kwargs, instantiates the module without initializing
    parameters / buffers.  This can be useful if initialization is slow or if custom initialization will
    be performed, making the default initialization unnecessary. There are some caveats to this, due to

Reported by Pylint.

Line too long (104/100)
Error

Line: 8 Column: 1

def skip_init(module_cls, *args, **kwargs):
    r"""
    Given a module class object and args / kwargs, instantiates the module without initializing
    parameters / buffers.  This can be useful if initialization is slow or if custom initialization will
    be performed, making the default initialization unnecessary. There are some caveats to this, due to
    the way this function is implemented:

    1. The module must accept a `device` arg in its constructor that is passed to any parameters
    or buffers created during construction.

Reported by Pylint.

Line too long (103/100)
Error

Line: 9 Column: 1

                  r"""
    Given a module class object and args / kwargs, instantiates the module without initializing
    parameters / buffers.  This can be useful if initialization is slow or if custom initialization will
    be performed, making the default initialization unnecessary. There are some caveats to this, due to
    the way this function is implemented:

    1. The module must accept a `device` arg in its constructor that is passed to any parameters
    or buffers created during construction.

Reported by Pylint.

torch/utils/data/datapipes/iter/tobytes.py
3 issues
Missing module docstring
Error

Line: 1 Column: 1

from typing import Tuple
from torch.utils.data import IterDataPipe


class ToBytesIterDataPipe(IterDataPipe[Tuple[str, bytes]]):
    r""" :class:`ToBytesIterDataPipe`

    Iterable DataPipe to load IO stream with label name,
    and to yield bytes with label name in a tuple

Reported by Pylint.

Too few public methods (1/2)
Error

Line: 5 Column: 1

from torch.utils.data import IterDataPipe


class ToBytesIterDataPipe(IterDataPipe[Tuple[str, bytes]]):
    r""" :class:`ToBytesIterDataPipe`

    Iterable DataPipe to load IO stream with label name,
    and to yield bytes with label name in a tuple
    args:

Reported by Pylint.

Variable name "d" doesn't conform to snake_case naming style
Error

Line: 21 Column: 17

    def __iter__(self):
        for (furl, stream) in self.source_datapipe:
            while True:
                d = stream.read(self.chunk)
                if not d:
                    break
                yield (furl, d)

Reported by Pylint.
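
`d` falls outside Pylint's default `good-names` list (`i`, `j`, `k`, `ex`, `Run`, `_`). Renaming it to something descriptive satisfies C0103 and reads better; `chunk_bytes` below is an illustrative choice, not the repository's:

    def __iter__(self):
        for furl, stream in self.source_datapipe:
            while True:
                # A descriptive snake_case name in place of the bare `d`.
                chunk_bytes = stream.read(self.chunk)
                if not chunk_bytes:
                    break
                yield (furl, chunk_bytes)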

torch/nn/utils/memory_format.py
3 issues
TODO: expand this to `_ConvNd` when channels_last support is extended
Error

Line: 62 Column: 3

        >>>  out = model(input)
    """

    # TODO: expand this to `_ConvNd` when channels_last support is extended
    # beyond only 4d tensors.
    if isinstance(module, torch.nn.Conv2d) or isinstance(module, torch.nn.ConvTranspose2d):
        weight_data = module.weight.detach().clone().contiguous(memory_format=memory_format)
        module.weight.data = weight_data.resize_(weight_data.size(), memory_format=memory_format)
    for child in module.children():

Reported by Pylint.
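
W0511 is the `fixme` checker: it flags the TODO marker itself, not a code problem. If TODO notes are meant to live in the codebase (an assumption about this project's conventions), the checker can be silenced module-wide:

# pylint: disable=fixme  # TODO notes are kept in-source deliberately

The alternative is to resolve the note, or move it to the issue tracker and delete the comment.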

Missing module docstring
Error

Line: 1 Column: 1

import torch

def convert_conv2d_weight_memory_format(module, memory_format):
    r"""Convert ``memory_format`` of ``nn.Conv2d.weight`` to ``memory_format``
    The conversion recursively applies to nested ``nn.Module``, including ``module``.
    Note that it only changes the memory_format, but not the semantics of each dimensions.
    This function is used to facilitate the computation to adopt NHWC kernels, which
    provides considerable speed up for fp16 data on CUDA devices with compute capability >= 7.0

Reported by Pylint.

Consider merging these isinstance calls to isinstance(module, (torch.nn.Conv2d, torch.nn.ConvTranspose2d))
Error

Line: 64 Column: 8

    # TODO: expand this to `_ConvNd` when channels_last support is extended
    # beyond only 4d tensors.
    if isinstance(module, torch.nn.Conv2d) or isinstance(module, torch.nn.ConvTranspose2d):
        weight_data = module.weight.detach().clone().contiguous(memory_format=memory_format)
        module.weight.data = weight_data.resize_(weight_data.size(), memory_format=memory_format)
    for child in module.children():
        convert_conv2d_weight_memory_format(child, memory_format)
    return module

Reported by Pylint.
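
R1701's suggestion is mechanical: `isinstance` accepts a tuple of types, so the two calls collapse into one without changing behaviour:

    # One isinstance call covering both conv types (R1701).
    if isinstance(module, (torch.nn.Conv2d, torch.nn.ConvTranspose2d)):
        weight_data = module.weight.detach().clone().contiguous(
            memory_format=memory_format)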