The following issues were found:

caffe2/python/operator_test/batch_box_cox_test.py
25 issues
Unable to import 'hypothesis'
Error

Line: 7 Column: 1

              

from caffe2.python import core
from hypothesis import given, settings

import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np

            

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 11 Column: 1

              
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np


# The reference implementation is susceptible to numerical cancellation when
# *lambda1* is small and *data* is near one. We leave it up to the caller to

            

Reported by Pylint.

Unused argument 'dc'
Warning

Line: 99 Column: 41

                                [0, 0, 0], [0, 0, 1e-6])
        self.batch_box_cox(inputs, gc, dc)

    def batch_box_cox(self, inputs, gc, dc):
        N, D, data, lambda1, lambda2 = inputs

        data = np.array(data, dtype=np.float32).reshape(N, D)
        lambda1 = np.array(lambda1, dtype=np.float32)
        lambda2 = np.array(lambda2, dtype=np.float32)

            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              




from caffe2.python import core
from hypothesis import given, settings

import caffe2.python.hypothesis_test_util as hu

            

Reported by Pylint.

Variable name "N" doesn't conform to snake_case naming style
Convention

Line: 27 Column: 5

              
@st.composite
def _inputs(draw):
    N = draw(st.integers(min_value=0, max_value=5))
    D = draw(st.integers(min_value=1, max_value=5))
    # N, D, data, lambda1, lambda2
    return (
        N,
        D,

            

Reported by Pylint.

Variable name "D" doesn't conform to snake_case naming style
Convention

Line: 28 Column: 5

              @st.composite
def _inputs(draw):
    N = draw(st.integers(min_value=0, max_value=5))
    D = draw(st.integers(min_value=1, max_value=5))
    # N, D, data, lambda1, lambda2
    return (
        N,
        D,
        draw(st.lists(

            

Reported by Pylint.

Missing class docstring
Convention

Line: 55 Column: 1

                  )


class TestBatchBoxCox(serial.SerializedTestCase):
    @given(
        inputs=_inputs(),
        **hu.gcs_cpu_only
    )
    @settings(deadline=10000)

            

Reported by Pylint.

Argument name "gc" doesn't conform to snake_case naming style
Convention

Line: 60 Column: 5

                      inputs=_inputs(),
        **hu.gcs_cpu_only
    )
    @settings(deadline=10000)
    def test_batch_box_cox(self, inputs, gc, dc):
        self.batch_box_cox(inputs, gc, dc)

    @given(**hu.gcs_cpu_only)
    @settings(deadline=10000)

            

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Convention

Line: 60 Column: 5

                      inputs=_inputs(),
        **hu.gcs_cpu_only
    )
    @settings(deadline=10000)
    def test_batch_box_cox(self, inputs, gc, dc):
        self.batch_box_cox(inputs, gc, dc)

    @given(**hu.gcs_cpu_only)
    @settings(deadline=10000)

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 60 Column: 5

                      inputs=_inputs(),
        **hu.gcs_cpu_only
    )
    @settings(deadline=10000)
    def test_batch_box_cox(self, inputs, gc, dc):
        self.batch_box_cox(inputs, gc, dc)

    @given(**hu.gcs_cpu_only)
    @settings(deadline=10000)

            

Reported by Pylint.

test/distributed/algorithms/test_join.py
25 issues
Unable to import 'torch'
Error

Line: 6 Column: 1

              import sys
from typing import Any, Optional

import torch
import torch.distributed as dist

if not dist.is_available():
    print("Distributed not available, skipping tests", file=sys.stderr)
    sys.exit(0)

            

Reported by Pylint.

Unable to import 'torch.distributed'
Error

Line: 7 Column: 1

              from typing import Any, Optional

import torch
import torch.distributed as dist

if not dist.is_available():
    print("Distributed not available, skipping tests", file=sys.stderr)
    sys.exit(0)


            

Reported by Pylint.

Unable to import 'torch.distributed.algorithms.join'
Error

Line: 13 Column: 1

                  print("Distributed not available, skipping tests", file=sys.stderr)
    sys.exit(0)

from torch.distributed.algorithms.join import Join, Joinable, JoinHook
from torch.testing._internal.common_distributed import (
    MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_distributed'
Error

Line: 14 Column: 1

                  sys.exit(0)

from torch.distributed.algorithms.join import Join, Joinable, JoinHook
from torch.testing._internal.common_distributed import (
    MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN


            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 18 Column: 1

                  MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN

if TEST_WITH_DEV_DBG_ASAN:
    print("Skip dev-asan as torch + multiprocessing spawn have known issues", file=sys.stderr)
    sys.exit(0)


            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              import contextlib
import os
import sys
from typing import Any, Optional

import torch
import torch.distributed as dist

if not dist.is_available():

            

Reported by Pylint.

Import "from torch.distributed.algorithms.join import Join, Joinable, JoinHook" should be placed at the top of the module
Convention

Line: 13 Column: 1

                  print("Distributed not available, skipping tests", file=sys.stderr)
    sys.exit(0)

from torch.distributed.algorithms.join import Join, Joinable, JoinHook
from torch.testing._internal.common_distributed import (
    MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN

            

Reported by Pylint.

Import "from torch.testing._internal.common_distributed import MultiProcessTestCase, require_n_gpus_for_nccl_backend" should be placed at the top of the module
Convention

Line: 14 Column: 1

                  sys.exit(0)

from torch.distributed.algorithms.join import Join, Joinable, JoinHook
from torch.testing._internal.common_distributed import (
    MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN


            

Reported by Pylint.

Import "from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN" should be placed at the top of the module
Convention

Line: 18 Column: 1

                  MultiProcessTestCase,
    require_n_gpus_for_nccl_backend,
)
from torch.testing._internal.common_utils import run_tests, TEST_WITH_DEV_DBG_ASAN

if TEST_WITH_DEV_DBG_ASAN:
    print("Skip dev-asan as torch + multiprocessing spawn have known issues", file=sys.stderr)
    sys.exit(0)


            

Reported by Pylint.

Variable name "t" doesn't conform to snake_case naming style
Convention

Line: 60 Column: 13

                      """
        device = self.allreducer.device
        for _ in range(self.num_allreduces):
            t = torch.zeros(1, device=device)
            dist.all_reduce(t)

    def post_hook(self, is_last_joiner: bool):
        r"""
        Broadcasts a tensor containing a magic constant ``AFTER_CONSTANT`` from

            

Reported by Pylint.

caffe2/python/operator_test/stats_put_ops_test.py
25 issues
Using deprecated method assertEquals()
Warning

Line: 40 Column: 9

              
        self.assertIn(stat_name + sum_postfix, stat_dict)
        self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
         default_value * magnitude_expand)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_clamp(self):
        put_value = 10

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 42 Column: 9

                      self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
         default_value * magnitude_expand)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_clamp(self):
        put_value = 10
        magnitude_expand = int(1e18)
        stat_name = "stat".encode('ascii')

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 71 Column: 9

              
        self.assertIn(stat_name + sum_postfix, stat_dict)
        self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
            9223372036854775807)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_clamp_with_out_of_bounds(self):
        put_value = float(1e20)

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 73 Column: 9

                      self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
            9223372036854775807)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_clamp_with_out_of_bounds(self):
        put_value = float(1e20)
        magnitude_expand = 1000000000000
        stat_name = "stat".encode('ascii')

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 102 Column: 9

              
        self.assertIn(stat_name + sum_postfix, stat_dict)
        self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
            9223372036854775807)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_avg_put_ops(self):
        put_value = 15.1111

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 104 Column: 9

                      self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
            9223372036854775807)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_avg_put_ops(self):
        put_value = 15.1111
        magnitude_expand = 10000
        stat_name = "a1".encode('ascii')

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 132 Column: 9

              
        self.assertIn(stat_name + sum_postfix, stat_dict)
        self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
         put_value * magnitude_expand)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_increment_put_ops(self):
        put_value = 15.1111

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 134 Column: 9

                      self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
         put_value * magnitude_expand)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

    def test_increment_put_ops(self):
        put_value = 15.1111
        magnitude_expand = 10000
        stat_name = "i1".encode('ascii')

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 160 Column: 9

                      stat_dict = dict(zip(k, v))

        self.assertIn(stat_name + member_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + member_postfix],
         put_value * magnitude_expand)

    def test_stddev_put_ops(self):
        put_value = 15.1111
        magnitude_expand = 10000

            

Reported by Pylint.

Using deprecated method assertEquals()
Warning

Line: 193 Column: 9

                      self.assertIn(stat_name + count_postfix, stat_dict)
        self.assertIn(stat_name + sumoffset_postfix, stat_dict)
        self.assertIn(stat_name + sumsqoffset_postfix, stat_dict)
        self.assertEquals(stat_dict[stat_name + sum_postfix],
            put_value * magnitude_expand)
        self.assertEquals(stat_dict[stat_name + count_postfix], 1)

            

Reported by Pylint.

torch/nn/grad.py
25 issues
Attempted relative import beyond top-level package
Error

Line: 4 Column: 1

              """Gradient interface"""

import torch
from .modules.utils import _single, _pair, _triple
import warnings


def _grad_input_padding(grad_output, input_size, stride, padding, kernel_size, dilation=None):
    if dilation is None:

            

Reported by Pylint.

Module 'torch' has no 'conv_transpose1d' member
Error

Line: 76 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose1d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv1d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv1d' member
Error

Line: 118 Column: 19

                  input = input.contiguous().view(1, input.shape[0] * input.shape[1],
                                    input.shape[2])

    grad_weight = torch.conv1d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2])


            

Reported by Pylint.

Module 'torch' has no 'conv_transpose2d' member
Error

Line: 165 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose2d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv2d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv2d' member
Error

Line: 209 Column: 19

                  input = input.contiguous().view(1, input.shape[0] * input.shape[1],
                                    input.shape[2], input.shape[3])

    grad_weight = torch.conv2d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2],
        grad_weight.shape[3])

            

Reported by Pylint.

Module 'torch' has no 'conv_transpose3d' member
Error

Line: 258 Column: 12

                  grad_input_padding = _grad_input_padding(grad_output, input_size, stride,
                                             padding, kernel_size, dilation)

    return torch.conv_transpose3d(
        grad_output, weight, None, stride, padding, grad_input_padding, groups,
        dilation)


def conv3d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):

            

Reported by Pylint.

Module 'torch' has no 'conv3d' member
Error

Line: 302 Column: 19

                                                  input.shape[2], input.shape[3],
                                    input.shape[4])

    grad_weight = torch.conv3d(input, grad_output, None, dilation, padding,
                               stride, in_channels * min_batch)

    grad_weight = grad_weight.contiguous().view(
        min_batch, grad_weight.shape[1] // min_batch, grad_weight.shape[2],
        grad_weight.shape[3], grad_weight.shape[4])

            

Reported by Pylint.

Redefining built-in 'input'
Warning

Line: 81 Column: 19

                      dilation)


def conv1d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv1d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iW)

            

Reported by Pylint.

Redefining built-in 'input'
Warning

Line: 170 Column: 19

                      dilation)


def conv2d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv2d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iH x iW)

            

Reported by Pylint.

Redefining built-in 'input'
Warning

Line: 263 Column: 19

                      dilation)


def conv3d_weight(input, weight_size, grad_output, stride=1, padding=0, dilation=1, groups=1):
    r"""
    Computes the gradient of conv3d with respect to the weight of the convolution.

    Args:
        input: input tensor of shape (minibatch x in_channels x iT x iH x iW)

            

Reported by Pylint.

test/distributed/pipeline/sync/test_inplace.py
25 issues
Unable to import 'pytest'
Error

Line: 7 Column: 1

              #
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 8 Column: 1

              # This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe



            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 9 Column: 1

              # LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):

            

Reported by Pylint.

Unable to import 'torch.distributed.pipeline.sync'
Error

Line: 11 Column: 1

              import torch
from torch import nn

from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

            

Reported by Pylint.

Unused argument 'setup_rpc'
Warning

Line: 14 Column: 35

              from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

            

Reported by Pylint.

Unused argument 'setup_rpc'
Warning

Line: 27 Column: 39

              

@pytest.mark.xfail(strict=True)
def test_inplace_on_not_requires_grad(setup_rpc):
    # In-place operation on a tensor not requiring grad doesn't cause a
    # RuntimeError. Currently, we cannot detect this case.
    model = nn.Sequential(nn.ReLU(inplace=True))
    model = Pipe(model, [1], devices=["cpu"], checkpoint="always")


            

Reported by Pylint.

Unused argument 'setup_rpc'
Warning

Line: 43 Column: 33

              

@pytest.mark.xfail(strict=True)
def test_inplace_incorrect_grad(setup_rpc):
    class M(nn.Module):
        def forward(self, foo_bar):
            # 'foo' requires grad but 'bar' does not. In-place operation on
            # 'bar' won't cause a RuntimeError.
            foo, bar = foo_bar

            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              # Copyright 2019 Kakao Brain
#
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import torch
from torch import nn

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 14 Column: 1

              from torch.distributed.pipeline.sync import Pipe


def test_inplace_on_requires_grad(setup_rpc):
    model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

            

Reported by Pylint.

Variable name "x" doesn't conform to snake_case naming style
Convention

Line: 18 Column: 5

                  model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
    model = Pipe(model, checkpoint="always")

    x = torch.rand(1)
    y = model(x).local_value()

    message = r"a leaf Variable that requires grad .* used in an in-place operation."
    with pytest.raises(RuntimeError, match=message):
        y.backward()

            

Reported by Pylint.

tools/coverage_plugins_package/src/coverage_plugins/jit_plugin.py
24 issues
Unable to import 'coverage'
Error

Line: 11 Column: 1

              marked as covered.
'''

from coverage import CoveragePlugin, CoverageData  # type: ignore[import]
from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines
from time import time
from typing import Any

# All coverage stats resulting from this plug-in will be in a separate .coverage file that should be merged later with

            

Reported by Pylint.

TODO: Because torch.jit._IgnoreContextManager relies on Python's `exec` method
Warning

Line: 45 Column: 3

                              filename = getsourcefile(obj)
                # We don't want to report for filename = None
                if filename:
                    # TODO: Because torch.jit._IgnoreContextManager relies on Python's `exec` method
                    # which doesn't generate source codelines, getsourcelines(obj) fails. For now,
                    # we just ignore the exception until we figure out a better way to
                    # implement torch.jit._IgnoreContextManager.
                    try:
                        sourcelines, starting_lineno = getsourcelines(obj)

            

Reported by Pylint.

Unused argument 'options'
Warning

Line: 58 Column: 29

                                      cov_data.add_lines(line_data)
        super().dynamic_context(frame)

def coverage_init(reg: Any, options: Any) -> None:
    reg.add_dynamic_context(JitPlugin())

            

Reported by Pylint.

Line too long (117/100)
Convention

Line: 2 Column: 1

              '''
This coverage plug-in attempts to cover JIT'd functions and methods that were previously missed in code coverage. Any
function and method that was passed through/decorated with torch.jit.script or torch.jit.script_method should now be
marked covered when coverage is run with this plug-in.

DISCLAIMER: note that this will mark the entire JIT'd function/method as covered without seeking proof that the
compiled code has been executed. This means that even if the code chunk is merely compiled and not run, it will get
marked as covered.
'''

            

Reported by Pylint.

Line too long (116/100)
Convention

Line: 3 Column: 1

              '''
This coverage plug-in attempts to cover JIT'd functions and methods that were previously missed in code coverage. Any
function and method that was passed through/decorated with torch.jit.script or torch.jit.script_method should now be
marked covered when coverage is run with this plug-in.

DISCLAIMER: note that this will mark the entire JIT'd function/method as covered without seeking proof that the
compiled code has been executed. This means that even if the code chunk is merely compiled and not run, it will get
marked as covered.
'''

            

Reported by Pylint.

Line too long (111/100)
Convention

Line: 6 Column: 1

              function and method that was passed through/decorated with torch.jit.script or torch.jit.script_method should now be
marked covered when coverage is run with this plug-in.

DISCLAIMER: note that this will mark the entire JIT'd function/method as covered without seeking proof that the
compiled code has been executed. This means that even if the code chunk is merely compiled and not run, it will get
marked as covered.
'''

from coverage import CoveragePlugin, CoverageData  # type: ignore[import]

            

Reported by Pylint.

Line too long (115/100)
Convention

Line: 7 Column: 1

              marked covered when coverage is run with this plug-in.

DISCLAIMER: note that this will mark the entire JIT'd function/method as covered without seeking proof that the
compiled code has been executed. This means that even if the code chunk is merely compiled and not run, it will get
marked as covered.
'''

from coverage import CoveragePlugin, CoverageData  # type: ignore[import]
from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines

            

Reported by Pylint.

standard import "from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines" should be placed before "from coverage import CoveragePlugin, CoverageData"
Convention

Line: 12 Column: 1

              '''

from coverage import CoveragePlugin, CoverageData  # type: ignore[import]
from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines
from time import time
from typing import Any

# All coverage stats resulting from this plug-in will be in a separate .coverage file that should be merged later with
# `coverage combine`. The convention seems to be .coverage.dotted.suffix based on the following link:

            

Reported by Pylint.

standard import "from time import time" should be placed before "from coverage import CoveragePlugin, CoverageData"
Convention

Line: 13 Column: 1

              
from coverage import CoveragePlugin, CoverageData  # type: ignore[import]
from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines
from time import time
from typing import Any

# All coverage stats resulting from this plug-in will be in a separate .coverage file that should be merged later with
# `coverage combine`. The convention seems to be .coverage.dotted.suffix based on the following link:
# https://coverage.readthedocs.io/en/coverage-5.5/cmd.html#combining-data-files-coverage-combine

            

Reported by Pylint.

standard import "from typing import Any" should be placed before "from coverage import CoveragePlugin, CoverageData"
Convention

Line: 14 Column: 1

              from coverage import CoveragePlugin, CoverageData  # type: ignore[import]
from inspect import ismodule, isclass, ismethod, isfunction, iscode, getsourcefile, getsourcelines
from time import time
from typing import Any

# All coverage stats resulting from this plug-in will be in a separate .coverage file that should be merged later with
# `coverage combine`. The convention seems to be .coverage.dotted.suffix based on the following link:
# https://coverage.readthedocs.io/en/coverage-5.5/cmd.html#combining-data-files-coverage-combine
cov_data = CoverageData(basename=f'.coverage.jit.{time()}')

            

Reported by Pylint.

caffe2/python/operator_test/split_op_cost_test.py
24 issues
Redefining name 'workspace' from outer scope (line 2)
Warning

Line: 7 Column: 28

              

class TestSplitOpCost(TestCase):
    def _verify_cost(self, workspace, split_op):
        flops, bytes_written, bytes_read = workspace.GetOperatorCost(
            split_op, split_op.input
        )
        self.assertEqual(flops, 0)
        self.assertEqual(

            

Reported by Pylint.

Missing module docstring
Convention

Line: 1 Column: 1

              import numpy as np
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase


class TestSplitOpCost(TestCase):
    def _verify_cost(self, workspace, split_op):
        flops, bytes_written, bytes_read = workspace.GetOperatorCost(
            split_op, split_op.input

            

Reported by Pylint.

Missing class docstring
Convention

Line: 6 Column: 1

              from caffe2.python.test_util import TestCase


class TestSplitOpCost(TestCase):
    def _verify_cost(self, workspace, split_op):
        flops, bytes_written, bytes_read = workspace.GetOperatorCost(
            split_op, split_op.input
        )
        self.assertEqual(flops, 0)

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 21 Column: 5

                          sum(workspace.FetchBlob(b).nbytes for b in split_op.output),
        )

    def test_columnwise_equal_outputSplit(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        split_op = core.CreateOperator(
            "Split",
            ["input"],

            

Reported by Pylint.

Method name "test_columnwise_equal_outputSplit" doesn't conform to snake_case naming style
Convention

Line: 21 Column: 5

                          sum(workspace.FetchBlob(b).nbytes for b in split_op.output),
        )

    def test_columnwise_equal_outputSplit(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        split_op = core.CreateOperator(
            "Split",
            ["input"],

            

Reported by Pylint.

Method name "test_rowwise_equal_outputSplit" doesn't conform to snake_case naming style
Convention

Line: 43 Column: 5

              
        self._verify_cost(workspace, split_op)

    def test_rowwise_equal_outputSplit(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        split_op = core.CreateOperator(
            "Split",
            ["input"],

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 43 Column: 5

              
        self._verify_cost(workspace, split_op)

    def test_rowwise_equal_outputSplit(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        split_op = core.CreateOperator(
            "Split",
            ["input"],

            

Reported by Pylint.

Method name "test_columnwise_equal_outputSplit_columnRemoved" doesn't conform to snake_case naming style
Convention

Line: 63 Column: 5

              
        self._verify_cost(workspace, split_op)

    def test_columnwise_equal_outputSplit_columnRemoved(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        # To be able to use 'add_axis' (which should have been called 'remove_axis') on 'axis',
        # the dimensions of split tensors must match on 'axis'
        split_op = core.CreateOperator(

            

Reported by Pylint.

Missing function or method docstring
Convention

Line: 63 Column: 5

              
        self._verify_cost(workspace, split_op)

    def test_columnwise_equal_outputSplit_columnRemoved(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        # To be able to use 'add_axis' (which should have been called 'remove_axis') on 'axis',
        # the dimensions of split tensors must match on 'axis'
        split_op = core.CreateOperator(

            

Reported by Pylint.

Method name "test_rowwise_equal_outputSplit_rowRemoved" doesn't conform to snake_case naming style
Convention

Line: 89 Column: 5

              
        self._verify_cost(workspace, split_op)

    def test_rowwise_equal_outputSplit_rowRemoved(self):
        workspace.ResetWorkspace()
        workspace.FeedBlob("input", np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32))
        split_op = core.CreateOperator(
            "Split",
            ["input"],

            

Reported by Pylint.

caffe2/python/models/resnet.py
24 issues
String statement has no effect
Warning

Line: 11 Column: 1

              from caffe2.python import brew
import logging

'''
Utility for creating ResNe(X)t
"Deep Residual Learning for Image Recognition" by He, Zhang et. al. 2015
"Aggregated Residual Transformations for Deep Neural Networks" by Xie et. al. 2016
'''


            

Reported by Pylint.

String statement has no effect
Error

Line: 86 Column: 5

                      )
        return self.prev_blob

    '''
    Add a "bottleneck" component as described in He et. al. Figure 3 (right)
    '''

    def add_bottleneck(
        self,

            

Reported by Pylint.

Redefining name 'log' from outer scope (line 285)
Error

Line: 306 Column: 5

                  conv1_kernel=7,
    conv1_stride=2,
    final_avg_kernel=7,
    log=None,
    bn_epsilon=1e-5,
    bn_momentum=0.9,
):
    if num_layers not in RESNEXT_BLOCK_CONFIG:
        log.error("{}-layer is invalid for resnext config".format(num_layers))

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              ## @package resnet
# Module caffe2.python.models.resnet





from caffe2.python import brew
import logging

            

Reported by Pylint.

standard import "import logging" should be placed before "from caffe2.python import brew"
Error

Line: 9 Column: 1

              

from caffe2.python import brew
import logging

'''
Utility for creating ResNe(X)t
"Deep Residual Learning for Image Recognition" by He, Zhang et. al. 2015
"Aggregated Residual Transformations for Deep Neural Networks" by Xie et. al. 2016

            

Reported by Pylint.

Too many instance attributes (8/7)
Error

Line: 18 Column: 1

              '''


class ResNetBuilder():
    '''
    Helper class for constructing residual blocks.
    '''

    def __init__(

            

Reported by Pylint.

Too many arguments (7/5)
Error

Line: 23 Column: 5

                  Helper class for constructing residual blocks.
    '''

    def __init__(
        self,
        model,
        prev_blob,
        no_bias,
        is_test,

            

Reported by Pylint.

Too many arguments (7/5)
Error

Line: 41 Column: 5

                      self.bn_momentum = bn_momentum
        self.no_bias = 1 if no_bias else 0

    def add_conv(
        self,
        in_filters,
        out_filters,
        kernel,
        stride=1,

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 41 Column: 5

                      self.bn_momentum = bn_momentum
        self.no_bias = 1 if no_bias else 0

    def add_conv(
        self,
        in_filters,
        out_filters,
        kernel,
        stride=1,

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 66 Column: 5

                      )
        return self.prev_blob

    def add_relu(self):
        self.prev_blob = brew.relu(
            self.model,
            self.prev_blob,
            self.prev_blob,  # in-place
        )

            

Reported by Pylint.

torch/fx/passes/split_utils.py
24 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              import torch.fx
import torch.nn as nn
from torch.fx.graph import map_arg
from .tools_common import NodeList, NodeSet


@dataclass
class Component:
    """

            

Reported by Pylint.

Method 'forward' is abstract in class 'torch.nn.modules.module' but is not overridden
Error

Line: 35 Column: 1

                  gm: Optional[torch.fx.GraphModule] = None


class HolderModule(nn.Module):
    """
    HolderModule is used to copy all the attributes from original module to submodules
    that uses the attributes
    """


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from dataclasses import dataclass, field
from typing import List, Optional, Dict

import torch.fx
import torch.nn as nn
from torch.fx.graph import map_arg
from .tools_common import NodeList, NodeSet



            

Reported by Pylint.

Too many instance attributes (9/7)
Error

Line: 11 Column: 1

              

@dataclass
class Component:
    """
    A component serves as a container for a subgraph we want to create afterwards.
    """

    graph: torch.fx.Graph

            

Reported by Pylint.

Attribute name "gm" doesn't conform to snake_case naming style
Error

Line: 32 Column: 5

                  # Mapping from get_attr node in original graph to get_attr node in `graph`.
    getattr_maps: Dict[torch.fx.Node, torch.fx.Node] = field(default_factory=dict)
    constructor_args: List[str] = field(default_factory=list)
    gm: Optional[torch.fx.GraphModule] = None


class HolderModule(nn.Module):
    """
    HolderModule is used to copy all the attributes from original module to submodules

            

Reported by Pylint.

Variable name "v" doesn't conform to snake_case naming style
Error

Line: 43 Column: 16

              
    def __init__(self, d):
        super().__init__()
        for k, v in d.items():
            self.add_module(k, v)


def split_by_tags(gm: torch.fx.GraphModule, tags: List[str]) -> torch.fx.GraphModule:
    """

            

Reported by Pylint.

Too many local variables (32/15)
Error

Line: 47 Column: 1

                          self.add_module(k, v)


def split_by_tags(gm: torch.fx.GraphModule, tags: List[str]) -> torch.fx.GraphModule:
    """
    Splits a GraphModule using tags on its graph nodes. We honor the order of
    tags. For example, we have tags = ["a", "b", "c"], the function will create
    the initial submodules in the order of "a_0", "b_1", "c_2".


            

Reported by Pylint.

Argument name "gm" doesn't conform to snake_case naming style
Error

Line: 47 Column: 1

                          self.add_module(k, v)


def split_by_tags(gm: torch.fx.GraphModule, tags: List[str]) -> torch.fx.GraphModule:
    """
    Splits a GraphModule using tags on its graph nodes. We honor the order of
    tags. For example, we have tags = ["a", "b", "c"], the function will create
    the initial submodules in the order of "a_0", "b_1", "c_2".


            

Reported by Pylint.

Too many statements (77/50)
Error

Line: 47 Column: 1

                          self.add_module(k, v)


def split_by_tags(gm: torch.fx.GraphModule, tags: List[str]) -> torch.fx.GraphModule:
    """
    Splits a GraphModule using tags on its graph nodes. We honor the order of
    tags. For example, we have tags = ["a", "b", "c"], the function will create
    the initial submodules in the order of "a_0", "b_1", "c_2".


            

Reported by Pylint.

Too many branches (22/12)
Error

Line: 47 Column: 1

                          self.add_module(k, v)


def split_by_tags(gm: torch.fx.GraphModule, tags: List[str]) -> torch.fx.GraphModule:
    """
    Splits a GraphModule using tags on its graph nodes. We honor the order of
    tags. For example, we have tags = ["a", "b", "c"], the function will create
    the initial submodules in the order of "a_0", "b_1", "c_2".


            

Reported by Pylint.

torch/distributed/pipeline/sync/skip/tracker.py
24 issues
Attempted relative import beyond top-level package
Error

Line: 14 Column: 1

              
from torch import Tensor

from ..checkpoint import is_checkpointing
from ..dependency import fork, join
from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 15 Column: 1

              from torch import Tensor

from ..checkpoint import is_checkpointing
from ..dependency import fork, join
from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 16 Column: 1

              
from ..checkpoint import is_checkpointing
from ..dependency import fork, join
from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 17 Column: 1

              from ..checkpoint import is_checkpointing
from ..dependency import fork, join
from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal

__all__: List[str] = []

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 18 Column: 1

              from ..dependency import fork, join
from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal

__all__: List[str] = []


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 19 Column: 1

              from ..microbatch import Batch
from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal

__all__: List[str] = []



            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 20 Column: 1

              from ..stream import AbstractStream
from .layout import SkipLayout
from .namespace import Namespace
from .portal import Portal

__all__: List[str] = []


class SkipTracker:

            

Reported by Pylint.

Unused argument 'batch'
Error

Line: 40 Column: 20

                  def __init__(self) -> None:
        self.tensors: Dict[Tuple[Namespace, str], Optional[Tensor]] = {}

    def save(self, batch: Batch, ns: Namespace, name: str, tensor: Optional[Tensor]) -> None:
        self.tensors[(ns, name)] = tensor

    def load(self, batch: Batch, ns: Namespace, name: str) -> Optional[Tensor]:
        return self.tensors.pop((ns, name))


            

Reported by Pylint.

Unused argument 'batch'
Error

Line: 43 Column: 20

                  def save(self, batch: Batch, ns: Namespace, name: str, tensor: Optional[Tensor]) -> None:
        self.tensors[(ns, name)] = tensor

    def load(self, batch: Batch, ns: Namespace, name: str) -> Optional[Tensor]:
        return self.tensors.pop((ns, name))

    def copy(
        self, batch: Batch, prev_stream: AbstractStream, next_stream: AbstractStream, ns: Namespace, name: str,
    ) -> None:

            

Reported by Pylint.

__init__ method from base class '_local' is not called
Error

Line: 146 Column: 5

              

class ThreadLocal(threading.local):
    def __init__(self) -> None:
        self.skip_tracker: Optional[SkipTracker] = None


thread_local = ThreadLocal()


            

Reported by Pylint.