The following issues were found:

benchmarks/fastrnns/bench.py
58 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

import argparse
from collections import namedtuple
import torch
import gc
import sys
import json
import copy
import time


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 10 Column: 1

import copy
import time

from .fuser import set_fuser
from .runner import get_nn_runners


BenchResult = namedtuple('BenchResult', [
    'name', 'avg_fwd', 'std_fwd', 'info_fwd', 'avg_bwd', 'std_bwd', 'info_bwd',

            

Reported by Pylint.
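
This usually means the file was linted or executed as a standalone script rather than as part of its package. A minimal sketch of one common workaround, assuming fuser.py and runner.py sit next to bench.py:

try:
    # Normal case: bench.py is imported as part of its package.
    from .fuser import set_fuser
    from .runner import get_nn_runners
except ImportError:
    # Fallback when the file is run or analyzed as a top-level script.
    from fuser import set_fuser
    from runner import get_nn_runners

Running the benchmark with python -m fastrnns.bench from the parent directory (module path assumed from the layout shown here) also keeps the package context intact without touching the imports.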

Attempted relative import beyond top-level package
Error

Line: 11 Column: 1

import time

from .fuser import set_fuser
from .runner import get_nn_runners


BenchResult = namedtuple('BenchResult', [
    'name', 'avg_fwd', 'std_fwd', 'info_fwd', 'avg_bwd', 'std_bwd', 'info_bwd',
])

            

Reported by Pylint.

Unused argument 'colwidth'
Error

Line: 32 Column: 18

                  return str(item)


def print_header(colwidth=16, sep=' '):
    items = []
    for item in BenchResult._fields:
        items.append(fit_str(item))
    return sep.join(items)


            

Reported by Pylint.
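
print_header accepts colwidth but never forwards it, so the value is silently ignored. A minimal sketch of one way to resolve this, assuming the intent is to pad each column to colwidth characters (this fit_str is a hypothetical stand-in for the helper defined in the file):

def fit_str(string, colwidth=16):
    if len(string) < colwidth:
        return string.ljust(colwidth)
    return string[:colwidth]


def print_header(colwidth=16, sep=' '):
    items = []
    for item in BenchResult._fields:
        items.append(fit_str(item, colwidth))
    return sep.join(items)

If the padding is not actually wanted, simply dropping the colwidth parameter (here and in pretty_print, which carries the same warning below) also clears the report.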

Unused argument 'colwidth'
Error

Line: 39 Column: 31

                  return sep.join(items)


def pretty_print(benchresult, colwidth=16, sep=' '):
    items = []
    for thing in benchresult:
        items.append(fit_str(to_str(thing)))
    return sep.join(items)


            

Reported by Pylint.

Attribute 'time' defined outside __init__
Error

Line: 51 Column: 9

                      pass

    def record(self):
        self.time = time.perf_counter()

    def elapsed_time(self, end_event):
        assert isinstance(end_event, Event)
        return end_event.time - self.time


            

Reported by Pylint.
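
The Event stub only creates self.time inside record(), so Pylint cannot see the attribute from __init__. A minimal sketch of the usual fix, initializing the attribute up front (the *args/**kwargs signature is a placeholder for whatever the real constructor accepts):

import time


class Event:
    def __init__(self, *args, **kwargs):
        # Declare the attribute here so its lifecycle is visible to readers and linters.
        self.time = 0.0

    def record(self):
        self.time = time.perf_counter()

    def elapsed_time(self, end_event):
        assert isinstance(end_event, Event)
        return end_event.time - self.time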

XXX: Use if need to print something
Error

Line: 79 Column: 3

                      forward_output = modeldef.forward(*modeldef.inputs)
        fwd_end_event.record()

        # XXX: Use if need to print something
        # print(modeldef.forward.graph_for(*modeldef.inputs))

        if modeldef.backward_setup is not None:
            backward_input = modeldef.backward_setup(forward_output)
        else:

            

Reported by Pylint.

Expression "[train_batch(modeldef) for _ in range(warmup)]" is assigned to nothing
Error

Line: 114 Column: 5

              
    modeldef = rnn_creator(**creator_args)

    [train_batch(modeldef) for _ in range(warmup)]

    results = [train_batch(modeldef) for _ in range(nloops)]
    fwd_times, bwd_times = zip(*results)

    fwd_times = torch.tensor(fwd_times)

            

Reported by Pylint.
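
The warmup comprehension is executed only for its side effects, which is what Pylint objects to. A minimal sketch of the usual rewrite, reusing the names from the excerpt above:

for _ in range(warmup):
    train_batch(modeldef)

results = [train_batch(modeldef) for _ in range(nloops)]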

Redefining name 'results' from outer scope (line 262)
Error

Line: 116 Column: 5

              
    [train_batch(modeldef) for _ in range(warmup)]

    results = [train_batch(modeldef) for _ in range(nloops)]
    fwd_times, bwd_times = zip(*results)

    fwd_times = torch.tensor(fwd_times)
    bwd_times = torch.tensor(bwd_times)
    return BenchResult(name=name,

            

Reported by Pylint.
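
The local results shadows the module-level results created near line 262 of the file. A minimal sketch of the rename that clears the warning (timing_results is an arbitrary choice):

timing_results = [train_batch(modeldef) for _ in range(nloops)]
fwd_times, bwd_times = zip(*timing_results)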

Redefining name 'args' from outer scope (line 228)
Error

Line: 130 Column: 1

                                     info_bwd=bwd_times)


def print_stderr(*args, **kwargs):
    kwargs['file'] = sys.stderr
    return print(*args, **kwargs)


def print_json_oss_format(results):

            

Reported by Pylint.

torch/testing/_internal/distributed/rpc/jit/dist_autograd_test.py
58 issues
Module 'torch' has no 'add' member
Error

Line: 17 Column: 12

              
@torch.jit.script
def local_add(t1, t2):
    return torch.add(t1, t2)


@torch.jit.script
def remote_add(t1, t2, dst: str):  # noqa: E999
    return rpc_async(dst, local_add, (t1, t2)).wait()

            

Reported by Pylint.
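
Most of the "Module 'torch' has no ... member" reports in this run are false positives: the attributes exist at runtime but are provided by torch's C extension, so Pylint cannot discover them statically. A minimal sketch of a per-line suppression:

import torch


def local_add(t1, t2):
    # no-member is a known false positive for attributes provided by torch's C extension.
    return torch.add(t1, t2)  # pylint: disable=no-member

Project-wide, the same effect is usually achieved through Pylint's generated-members or extension-pkg-allow-list settings (extension-pkg-whitelist on older releases), which avoids touching individual call sites; option availability should be checked against the installed Pylint version.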

Instance of 'JitDistAutogradTest' has no 'rank' member
Error

Line: 34 Column: 20

class JitDistAutogradTest(RpcAgentTestFixture):
    @dist_init
    def test_get_gradients(self):
        dst_rank = self.rank

        @torch.jit.script
        def dist_get_gradients(context_id: int) -> (Dict[Tensor, Tensor]):
            return dist_autograd.get_gradients(context_id)


            

Reported by Pylint.

Module 'torch' has no 'rand' member
Error

Line: 42 Column: 18

              
        FileCheck().check("get_gradients").run(str(dist_get_gradients.graph))
        with dist_autograd.context() as context_id:
            t1 = torch.rand((3, 3), requires_grad=True)
            t2 = torch.rand((3, 3), requires_grad=True)
            t3 = torch.add(t1, t2)

            dist_autograd.backward(context_id, [t3.sum()])
            grads = dist_get_gradients(context_id)

            

Reported by Pylint.

Module 'torch' has no 'rand' member
Error

Line: 43 Column: 18

                      FileCheck().check("get_gradients").run(str(dist_get_gradients.graph))
        with dist_autograd.context() as context_id:
            t1 = torch.rand((3, 3), requires_grad=True)
            t2 = torch.rand((3, 3), requires_grad=True)
            t3 = torch.add(t1, t2)

            dist_autograd.backward(context_id, [t3.sum()])
            grads = dist_get_gradients(context_id)


            

Reported by Pylint.

Module 'torch' has no 'add' member
Error

Line: 44 Column: 18

                      with dist_autograd.context() as context_id:
            t1 = torch.rand((3, 3), requires_grad=True)
            t2 = torch.rand((3, 3), requires_grad=True)
            t3 = torch.add(t1, t2)

            dist_autograd.backward(context_id, [t3.sum()])
            grads = dist_get_gradients(context_id)

            self.assertEqual(2, len(grads))

            

Reported by Pylint.

Instance of 'JitDistAutogradTest' has no 'assertEqual' member
Error

Line: 49 Column: 13

                          dist_autograd.backward(context_id, [t3.sum()])
            grads = dist_get_gradients(context_id)

            self.assertEqual(2, len(grads))
            self.assertIn(t1, grads)
            self.assertIn(t2, grads)
            self.assertEqual(torch.ones(3, 3), grads[t1])
            self.assertEqual(torch.ones(3, 3), grads[t2])


            

Reported by Pylint.

Instance of 'JitDistAutogradTest' has no 'assertIn' member
Error

Line: 50 Column: 13

                          grads = dist_get_gradients(context_id)

            self.assertEqual(2, len(grads))
            self.assertIn(t1, grads)
            self.assertIn(t2, grads)
            self.assertEqual(torch.ones(3, 3), grads[t1])
            self.assertEqual(torch.ones(3, 3), grads[t2])

    @dist_init

            

Reported by Pylint.

Instance of 'JitDistAutogradTest' has no 'assertIn' member
Error

Line: 51 Column: 13

              
            self.assertEqual(2, len(grads))
            self.assertIn(t1, grads)
            self.assertIn(t2, grads)
            self.assertEqual(torch.ones(3, 3), grads[t1])
            self.assertEqual(torch.ones(3, 3), grads[t2])

    @dist_init
    def test_dist_backward(self):

            

Reported by Pylint.

Module 'torch' has no 'ones' member
Error

Line: 52 Column: 30

                          self.assertEqual(2, len(grads))
            self.assertIn(t1, grads)
            self.assertIn(t2, grads)
            self.assertEqual(torch.ones(3, 3), grads[t1])
            self.assertEqual(torch.ones(3, 3), grads[t2])

    @dist_init
    def test_dist_backward(self):
        if self.rank != 0:

            

Reported by Pylint.

Instance of 'JitDistAutogradTest' has no 'assertEqual' member
Error

Line: 52 Column: 13

                          self.assertEqual(2, len(grads))
            self.assertIn(t1, grads)
            self.assertIn(t2, grads)
            self.assertEqual(torch.ones(3, 3), grads[t1])
            self.assertEqual(torch.ones(3, 3), grads[t2])

    @dist_init
    def test_dist_backward(self):
        if self.rank != 0:

            

Reported by Pylint.

test/jit/test_string_formatting.py
57 issues
Unable to import 'torch'
Error

Line: 4 Column: 1

import os
import sys

import torch
from typing import List

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 10 Column: 1

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

            

Reported by Pylint.

Format string ends in middle of conversion specifier
Error

Line: 42 Column: 20

                  def test_string_interpolation_with_percent_in_string(self):
        @torch.jit.script
        def fn(arg1: str) -> str:
            return "%s in template %" % arg1    # noqa: F501

        with self.assertRaisesRegexWithHighlight(RuntimeError,
                                                 "Incomplete format specifier",
                                                 "\"%s in template %\" % arg1"):
            fn("foo")

            

Reported by Pylint.
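
The incomplete specifier here is deliberate: the test exists to check that TorchScript reports "Incomplete format specifier", so this warning can be suppressed for the line rather than fixed. For reference, a literal percent sign in ordinary %-formatting is written by doubling it:

def fn(arg1: str) -> str:
    return "%s in template %%" % arg1   # renders as "<arg1> in template %"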

Too many arguments for format string
Error

Line: 147 Column: 20

                  def test_string_interpolation_with_too_many_arguments(self):
        @torch.jit.script
        def fn(arg1: str, arg2: str) -> str:
            return "%s in template" % (arg1, arg2)    # noqa: F507

        with self.assertRaisesRegexWithHighlight(RuntimeError,
                                                 "Too many arguments for format string",
                                                 "\"%s in template\" % (arg1, arg2"):
            fn("foo", "bar")

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

import os
import sys

import torch
from typing import List

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

            

Reported by Pylint.

standard import "from typing import List" should be placed before "import torch"
Error

Line: 5 Column: 1

import sys

import torch
from typing import List

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase

            

Reported by Pylint.
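
Pylint expects standard-library imports ahead of third-party ones. A minimal sketch of the reordered header for this file:

import os
import sys
from typing import List

import torch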

Import "from torch.testing._internal.jit_utils import JitTestCase" should be placed at the top of the module
Error

Line: 10 Column: 1

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

            

Reported by Pylint.
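
This import deliberately comes after the sys.path.append call that makes the helper directory importable, so moving it to the top would break the file. Assuming the late import stays, the usual resolution is a targeted suppression:

pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase  # pylint: disable=wrong-import-position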

Missing class docstring
Error

Line: 17 Column: 1

                                     "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

class TestStringFormatting(JitTestCase):

    def test_modulo_operator(self):
        def fn(dividend: int, divisor: int) -> int:
            return dividend % divisor
        self.checkScript(fn, (5, 2))

            

Reported by Pylint.

Too many public methods (22/20)
Error

Line: 17 Column: 1

                                     "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")

class TestStringFormatting(JitTestCase):

    def test_modulo_operator(self):
        def fn(dividend: int, divisor: int) -> int:
            return dividend % divisor
        self.checkScript(fn, (5, 2))

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 19 Column: 5

              
class TestStringFormatting(JitTestCase):

    def test_modulo_operator(self):
        def fn(dividend: int, divisor: int) -> int:
            return dividend % divisor
        self.checkScript(fn, (5, 2))

    def test_string_interpolation_with_string_placeholder_and_string_variable(self):

            

Reported by Pylint.
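
All of the docstring reports for this file (module, class, and methods) are resolved the same way. A minimal sketch for the method shown above; the wording is only illustrative:

def test_modulo_operator(self):
    """Check that the integer % operator scripts and runs correctly."""
    def fn(dividend: int, divisor: int) -> int:
        return dividend % divisor
    self.checkScript(fn, (5, 2))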

torch/nn/quantized/modules/linear.py
57 issues
Module 'torch' has no 'qint8' member
Error

Line: 12 Column: 30

class LinearPackedParams(torch.nn.Module):
    _version = 3

    def __init__(self, dtype=torch.qint8):
        super().__init__()
        self.dtype = dtype
        if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:

            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 15 Column: 26

                  def __init__(self, dtype=torch.qint8):
        super().__init__()
        self.dtype = dtype
        if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)


            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 16 Column: 87

                      super().__init__()
        self.dtype = dtype
        if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)

    @torch.jit.export

            

Reported by Pylint.

Module 'torch' has no '_empty_affine_quantized' member
Error

Line: 16 Column: 18

                      super().__init__()
        self.dtype = dtype
        if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)

    @torch.jit.export

            

Reported by Pylint.

Module 'torch' has no 'float16' member
Error

Line: 17 Column: 28

                      self.dtype = dtype
        if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)

    @torch.jit.export
    def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor]) -> None:

            

Reported by Pylint.

Module 'torch' has no 'zeros' member
Error

Line: 18 Column: 18

                      if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)

    @torch.jit.export
    def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor]) -> None:
        if self.dtype == torch.qint8:

            

Reported by Pylint.

Module 'torch' has no 'float' member
Error

Line: 18 Column: 44

                      if self.dtype == torch.qint8:
            wq = torch._empty_affine_quantized([1, 1], scale=1.0, zero_point=0, dtype=torch.qint8)
        elif self.dtype == torch.float16:
            wq = torch.zeros([1, 1], dtype=torch.float)
        self.set_weight_bias(wq, None)

    @torch.jit.export
    def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor]) -> None:
        if self.dtype == torch.qint8:

            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 23 Column: 26

              
    @torch.jit.export
    def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor]) -> None:
        if self.dtype == torch.qint8:
            self._packed_params = torch.ops.quantized.linear_prepack(weight, bias)
        elif self.dtype == torch.float16:
            self._packed_params = torch.ops.quantized.linear_prepack_fp16(weight, bias)
        else:
            raise RuntimeError('Unsupported dtype on dynamic quantized linear!')

            

Reported by Pylint.

Module 'torch' has no 'float16' member
Error

Line: 25 Column: 28

                  def set_weight_bias(self, weight: torch.Tensor, bias: Optional[torch.Tensor]) -> None:
        if self.dtype == torch.qint8:
            self._packed_params = torch.ops.quantized.linear_prepack(weight, bias)
        elif self.dtype == torch.float16:
            self._packed_params = torch.ops.quantized.linear_prepack_fp16(weight, bias)
        else:
            raise RuntimeError('Unsupported dtype on dynamic quantized linear!')



            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 33 Column: 26

              
    @torch.jit.export
    def _weight_bias(self):
        if self.dtype == torch.qint8:
            return torch.ops.quantized.linear_unpack(self._packed_params)
        elif self.dtype == torch.float16:
            return torch.ops.quantized.linear_unpack_fp16(self._packed_params)
        else:
            raise RuntimeError('Unsupported dtype on dynamic quantized linear!')

            

Reported by Pylint.

test/distributed/test_jit_c10d.py
57 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

import tempfile
import sys
import torch
import torch.distributed as c10d
import time
from typing import List

from torch.testing._internal.common_distributed import requires_nccl, create_tcp_store
from torch.testing._internal.common_utils import load_tests, TEST_WITH_TSAN, run_tests, sandcastle_skip_if

            

Reported by Pylint.

Unable to import 'torch.distributed'
Error

Line: 4 Column: 1

import tempfile
import sys
import torch
import torch.distributed as c10d
import time
from typing import List

from torch.testing._internal.common_distributed import requires_nccl, create_tcp_store
from torch.testing._internal.common_utils import load_tests, TEST_WITH_TSAN, run_tests, sandcastle_skip_if

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_distributed'
Error

Line: 8 Column: 1

import time
from typing import List

from torch.testing._internal.common_distributed import requires_nccl, create_tcp_store
from torch.testing._internal.common_utils import load_tests, TEST_WITH_TSAN, run_tests, sandcastle_skip_if
from torch.testing._internal.jit_utils import JitTestCase

# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 9 Column: 1

from typing import List

from torch.testing._internal.common_distributed import requires_nccl, create_tcp_store
from torch.testing._internal.common_utils import load_tests, TEST_WITH_TSAN, run_tests, sandcastle_skip_if
from torch.testing._internal.jit_utils import JitTestCase

# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings
load_tests = load_tests

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 10 Column: 1

              
from torch.testing._internal.common_distributed import requires_nccl, create_tcp_store
from torch.testing._internal.common_utils import load_tests, TEST_WITH_TSAN, run_tests, sandcastle_skip_if
from torch.testing._internal.jit_utils import JitTestCase

# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings
load_tests = load_tests


            

Reported by Pylint.

Assigning the same variable 'load_tests' to itself
Error

Line: 14 Column: 1

              
# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings
load_tests = load_tests

if not c10d.is_available():
    print('c10d not available, skipping tests', file=sys.stderr)
    sys.exit(0)


            

Reported by Pylint.
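
The self-assignment is deliberate, as the file's own comment explains (it keeps flake8 from flagging the load_tests import as unused). Assuming it stays, a targeted suppression keeps Pylint quiet as well:

# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings
load_tests = load_tests  # pylint: disable=self-assigning-variable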

Redefining built-in 'input'
Error

Line: 68 Column: 9

                  def test_process_group_nccl_torchbind_alltoall(self):
        nccl_pg = self._create_nccl_pg("process_group_nccl_as_base_class")

        input = torch.rand(16).cuda()
        output = torch.rand(16).cuda()

        @torch.jit.script
        def run_pg_nccl_alltoall(
            pg: torch.classes.dist_c10d.ProcessGroupNCCL,

            

Reported by Pylint.
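
input shadows the Python builtin of the same name; renaming the local is the usual fix. A minimal sketch based on the excerpt above (input_tensor is an arbitrary choice):

input_tensor = torch.rand(16).cuda()
output = torch.rand(16).cuda()

The same rename applies to the other "Redefining built-in 'input'" reports in this file and in the test_deferred_batch_norm.py and test_package_fx.py entries later in this report.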

Redefining built-in 'input'
Error

Line: 75 Column: 13

                      def run_pg_nccl_alltoall(
            pg: torch.classes.dist_c10d.ProcessGroupNCCL,
            output: torch.Tensor,
            input: torch.Tensor
        ):
            output_split_sizes: List[int] = []
            input_split_sizes: List[int] = []
            work = pg.alltoall_base(output, input, output_split_sizes, input_split_sizes)
            work.wait()

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 97 Column: 9

                      name = unique_process_group_name("alltoall_test_process_group")
        nccl_pg = self._create_nccl_pg_as_base_process_group(name)

        input = torch.rand(16).cuda()
        output = torch.rand(16).cuda()

        @torch.jit.script
        def run_pg_nccl_alltoall(
            pg: torch.classes.dist_c10d.ProcessGroup,

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 104 Column: 13

                      def run_pg_nccl_alltoall(
            pg: torch.classes.dist_c10d.ProcessGroup,
            output: torch.Tensor,
            input: torch.Tensor
        ):
            output_split_sizes: List[int] = []
            input_split_sizes: List[int] = []
            work = pg.alltoall_base(output, input, output_split_sizes, input_split_sizes)
            work.wait()

            

Reported by Pylint.

caffe2/python/trt/test_pt_onnx_trt.py
57 issues
Unable to import 'torchvision.models'
Error

Line: 22 Column: 1

from PIL import Image
import numpy as np
import torch
import torchvision.models as models

import pycuda.driver as cuda
# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit


            

Reported by Pylint.

Unable to import 'pycuda.driver'
Error

Line: 24 Column: 1

import torch
import torchvision.models as models

import pycuda.driver as cuda
# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit

import tensorrt as trt
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)

            

Reported by Pylint.

Unable to import 'pycuda.autoinit'
Error

Line: 26 Column: 1

              
import pycuda.driver as cuda
# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit

import tensorrt as trt
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)

def allocate_buffers(engine):

            

Reported by Pylint.

Unable to import 'tensorrt'
Error

Line: 28 Column: 1

# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit

import tensorrt as trt
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)

def allocate_buffers(engine):
    h_input = cuda.pagelocked_empty(trt.volume(engine.get_binding_shape(0)),
                                    dtype=trt.nptype(trt.float32))

            

Reported by Pylint.

Module 'torch' has no 'randn' member
Error

Line: 82 Column: 25

                      model = getattr(models, model_name)(pretrained=True)

        shape = (1,) + input_shape
        dummy_input  = (torch.randn(shape),)
        onnx_name = model_name + ".onnx"

        torch.onnx.export(model,
                          dummy_input,
                          onnx_name,

            

Reported by Pylint.

Unused import pycuda.autoinit
Error

Line: 26 Column: 1

              
import pycuda.driver as cuda
# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit

import tensorrt as trt
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)

def allocate_buffers(engine):

            

Reported by Pylint.
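
The import is needed purely for its side effect, as the file's own comment says (pycuda.autoinit sets up CUDA context creation and cleanup on import). Assuming that behaviour is wanted, the standard resolution is a targeted suppression rather than removing the import:

# This import causes pycuda to automatically manage CUDA context creation and cleanup.
import pycuda.autoinit  # pylint: disable=unused-import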

Unused variable 'c'
Error

Line: 43 Column: 9

              
def load_normalized_test_case(input_shape, test_image, pagelocked_buffer, normalization_hint):
    def normalize_image(image):
        c, h, w = input_shape
        image_arr = np.asarray(image.resize((w, h), Image.ANTIALIAS)).transpose([2, 0, 1])\
            .astype(trt.nptype(trt.float32)).ravel()
        if (normalization_hint == 0):
            return (image_arr / 255.0 - 0.45) / 0.225
        elif (normalization_hint == 1):

            

Reported by Pylint.
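
The channel count c is unpacked but never used. The conventional fix is to unpack it into an underscore:

_, h, w = input_shape  # only height and width are needed here

The later "Unused variable 'index'" report in this file can be handled the same way, for example by iterating directly over self.image_files when the position is not needed.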

Unused variable 'index'
Error

Line: 98 Column: 21

                          h_input, d_input, h_output, d_output, stream = allocate_buffers(engine)
            with engine.create_execution_context() as context:
                err_count = 0
                for index, f in enumerate(self.image_files):
                    test_case = load_normalized_test_case(input_shape, f,\
                        h_input, normalization_hint)
                    cuda.memcpy_htod_async(d_input, h_input, stream)

                    context.execute_async_v2(bindings=[d_input, d_output],

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

###################################################################################################
# ATTENTION! This test will most probably fail if you install TensorRT 6.0.1 only.
# That's because it's shipped with older version of ONNX parser not supporting some
# required features. To make it work please use new version: https://github.com/onnx/onnx-tensorrt
# Just clone it and do something like this:
#
# ~/pt/third_party/onnx-tensorrt$ mkdir build/
# ~/pt/third_party/onnx-tensorrt$ cd build/
# ~/pt/third_party/onnx-tensorrt/build$ cmake ..

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 31 Column: 1

import tensorrt as trt
TRT_LOGGER = trt.Logger(trt.Logger.WARNING)

def allocate_buffers(engine):
    h_input = cuda.pagelocked_empty(trt.volume(engine.get_binding_shape(0)),
                                    dtype=trt.nptype(trt.float32))
    h_output = cuda.pagelocked_empty(trt.volume(engine.get_binding_shape(1)),
                                     dtype=trt.nptype(trt.float32))
    d_input = cuda.mem_alloc(h_input.nbytes)

            

Reported by Pylint.

torch/jit/_builtins.py
57 issues
Attempted relative import beyond top-level package
Error

Line: 9 Column: 1

import torch.backends.cudnn as cudnn

from torch._six import PY37
from ..nn.modules.utils import _single, _pair, _triple, _quadruple, _list_with_default

from collections import OrderedDict
from typing import Dict, Optional

_builtin_table: Optional[Dict[int, str]] = None

            

Reported by Pylint.

Module 'torch._VF' has no 'stft' member
Error

Line: 100 Column: 6

                  (torch.nn.init._no_grad_zero_, "aten::_no_grad_zero_"),
    (torch._C._get_tracing_state, "aten::_get_tracing_state"),
    (warnings.warn, "aten::warn"),
    (torch._VF.stft, "aten::stft"),  # type: ignore[attr-defined]
    (torch._VF.istft, "aten::istft"),  # type: ignore[attr-defined]
    (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]

            

Reported by Pylint.

Module 'torch._VF' has no 'istft' member
Error

Line: 101 Column: 6

                  (torch._C._get_tracing_state, "aten::_get_tracing_state"),
    (warnings.warn, "aten::warn"),
    (torch._VF.stft, "aten::stft"),  # type: ignore[attr-defined]
    (torch._VF.istft, "aten::istft"),  # type: ignore[attr-defined]
    (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),

            

Reported by Pylint.

Module 'torch._VF' has no 'cdist' member
Error

Line: 102 Column: 6

                  (warnings.warn, "aten::warn"),
    (torch._VF.stft, "aten::stft"),  # type: ignore[attr-defined]
    (torch._VF.istft, "aten::istft"),  # type: ignore[attr-defined]
    (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),

            

Reported by Pylint.

Module 'torch._VF' has no 'norm' member
Error

Line: 103 Column: 6

                  (torch._VF.stft, "aten::stft"),  # type: ignore[attr-defined]
    (torch._VF.istft, "aten::istft"),  # type: ignore[attr-defined]
    (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]

            

Reported by Pylint.

Module 'torch._VF' has no 'unique_dim' member
Error

Line: 104 Column: 6

                  (torch._VF.istft, "aten::istft"),  # type: ignore[attr-defined]
    (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]
]

            

Reported by Pylint.

Module 'torch._VF' has no 'unique_consecutive' member
Error

Line: 105 Column: 6

                  (torch._VF.cdist, "aten::cdist"),  # type: ignore[attr-defined]
    (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]
]


            

Reported by Pylint.

Module 'torch._VF' has no 'nuclear_norm' member
Error

Line: 106 Column: 6

                  (torch._VF.norm, "aten::norm"),  # type: ignore[attr-defined]
    (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]
]

# ops in torch.functional are bound to torch

            

Reported by Pylint.

Module 'torch._VF' has no 'frobenius_norm' member
Error

Line: 107 Column: 6

                  (torch._VF.unique_dim, "aten::unique_dim"),
    (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]
]

# ops in torch.functional are bound to torch
# in these cases, we want to resolve the function to their python implementation

            

Reported by Pylint.

Module 'torch._VF' has no 'tensordot' member
Error

Line: 108 Column: 6

                  (torch._VF.unique_consecutive, "aten::unique_consecutive"),  # type: ignore[attr-defined]
    (torch._VF.nuclear_norm, "aten::nuclear_norm"),
    (torch._VF.frobenius_norm, "aten::frobenius_norm"),
    (torch._VF.tensordot, "aten::tensordot"),  # type: ignore[attr-defined]
]

# ops in torch.functional are bound to torch
# in these cases, we want to resolve the function to their python implementation
# instead looking up a builtin "aten::" schema

            

Reported by Pylint.

test/distributed/pipeline/sync/test_deferred_batch_norm.py
57 issues
Unable to import 'pytest'
Error

Line: 10 Column: 1

from copy import deepcopy
from itertools import chain

import pytest
import torch
from torch import nn, optim

from torch.distributed.pipeline.sync.batchnorm import DeferredBatchNorm


            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 11 Column: 1

from itertools import chain

import pytest
import torch
from torch import nn, optim

from torch.distributed.pipeline.sync.batchnorm import DeferredBatchNorm

CHUNKS = 4

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 12 Column: 1

              
import pytest
import torch
from torch import nn, optim

from torch.distributed.pipeline.sync.batchnorm import DeferredBatchNorm

CHUNKS = 4


            

Reported by Pylint.

Unable to import 'torch.distributed.pipeline.sync.batchnorm'
Error

Line: 14 Column: 1

import torch
from torch import nn, optim

from torch.distributed.pipeline.sync.batchnorm import DeferredBatchNorm

CHUNKS = 4


def tilt_dist(input):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 19 Column: 15

CHUNKS = 4


def tilt_dist(input):
    # Tilt variance by channel.
    rgb = input.transpose(0, 1)
    rgb[0] *= 1
    rgb[1] *= 10
    rgb[2] *= 100

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 33 Column: 28

                  return input


def chunked_forward(model, input, chunks=CHUNKS):
    output_chunks = []

    for chunk in input.chunk(chunks):
        output_chunks.append(model(chunk))


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 75 Column: 5

                  bn = nn.BatchNorm2d(3, momentum=momentum)
    dbn = DeferredBatchNorm.convert_deferred_batch_norm(deepcopy(bn), chunks=CHUNKS)

    input = torch.rand(16, 3, 224, 224)
    input = tilt_dist(input)

    bn(input)
    chunked_forward(dbn, input)


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 102 Column: 5

                  bn = nn.BatchNorm2d(3)
    dbn = DeferredBatchNorm.convert_deferred_batch_norm(deepcopy(bn), chunks=CHUNKS)

    input = torch.rand(16, 3, 224, 224)
    input = tilt_dist(input)

    bn(input)
    chunked_forward(dbn, input)


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 121 Column: 9

                  opt = optim.SGD(chain(bn.parameters(), dbn.parameters()), lr=1.0)

    for i in range(5):
        input = torch.rand(16, 3, 224, 224)
        input = tilt_dist(input)

        # train
        y = bn(input)
        a = y.sum()

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 147 Column: 5

                  bn = nn.Sequential(nn.Conv2d(3, 3, 1), nn.BatchNorm2d(3))
    dbn = DeferredBatchNorm.convert_deferred_batch_norm(deepcopy(bn), chunks=CHUNKS)

    input = torch.rand(16, 3, 224, 224)
    input = tilt_dist(input)

    opt = optim.SGD(chain(bn.parameters(), dbn.parameters()), lr=0.1)

    # 1st step

            

Reported by Pylint.

test/package/test_package_fx.py
57 issues
Unable to import 'torch'
Error

Line: 3 Column: 1

from io import BytesIO

import torch
from torch.fx import Graph, GraphModule, symbolic_trace
from torch.package import (
    ObjMismatchError,
    PackageExporter,
    PackageImporter,
    sys_importer,

            

Reported by Pylint.

Unable to import 'torch.fx'
Error

Line: 4 Column: 1

from io import BytesIO

import torch
from torch.fx import Graph, GraphModule, symbolic_trace
from torch.package import (
    ObjMismatchError,
    PackageExporter,
    PackageImporter,
    sys_importer,

            

Reported by Pylint.

Unable to import 'torch.package'
Error

Line: 5 Column: 1

              
import torch
from torch.fx import Graph, GraphModule, symbolic_trace
from torch.package import (
    ObjMismatchError,
    PackageExporter,
    PackageImporter,
    sys_importer,
)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 11 Column: 1

                  PackageImporter,
    sys_importer,
)
from torch.testing._internal.common_utils import run_tests

try:
    from .common import PackageTestCase
except ImportError:
    # Support the case where we run this file directly.

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 42 Column: 9

                      self.assertTrue(torch.allclose(loaded_traced(input), traced(input)))

    def test_package_then_fx(self):
        from package_a.test_module import SimpleTest

        model = SimpleTest()
        f = BytesIO()
        with PackageExporter(f) as pe:
            pe.intern("**")

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 58 Column: 9

                      self.assertTrue(torch.allclose(loaded(input), traced(input)))

    def test_package_fx_package(self):
        from package_a.test_module import SimpleTest

        model = SimpleTest()
        f = BytesIO()
        with PackageExporter(f) as pe:
            pe.intern("**")

            

Reported by Pylint.

Unable to import 'package_a.subpackage'
Error

Line: 93 Column: 9

                      self.assertTrue(torch.allclose(loaded(input), loaded2(input)))

    def test_package_fx_with_imports(self):
        import package_a.subpackage

        # Manually construct a graph that invokes a leaf function
        graph = Graph()
        a = graph.placeholder("x")
        b = graph.placeholder("y")

            

Reported by Pylint.

Unable to import 'package_a.test_all_leaf_modules_tracer'
Error

Line: 125 Column: 9

                      self.assertTrue(packaged_dependency is not package_a.subpackage)

    def test_package_fx_custom_tracer(self):
        from package_a.test_all_leaf_modules_tracer import TestAllLeafModulesTracer
        from package_a.test_module import SimpleTest, ModWithTwoSubmodsAndTensor

        class SpecialGraphModule(torch.fx.GraphModule):
            def __init__(self, root, graph, info):
                super().__init__(root, graph)

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 126 Column: 9

              
    def test_package_fx_custom_tracer(self):
        from package_a.test_all_leaf_modules_tracer import TestAllLeafModulesTracer
        from package_a.test_module import SimpleTest, ModWithTwoSubmodsAndTensor

        class SpecialGraphModule(torch.fx.GraphModule):
            def __init__(self, root, graph, info):
                super().__init__(root, graph)
                self.info = info

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 38 Column: 9

                      f.seek(0)
        pi = PackageImporter(f)
        loaded_traced = pi.load_pickle("model", "model.pkl")
        input = torch.rand(2, 3)
        self.assertTrue(torch.allclose(loaded_traced(input), traced(input)))

    def test_package_then_fx(self):
        from package_a.test_module import SimpleTest


            

Reported by Pylint.

tools/stats/s3_stat_parser.py
57 issues
Unable to import 'typing_extensions'
Error

Line: 8 Column: 1

from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple, Union, Any, cast
from typing_extensions import Literal, TypedDict

try:
    import boto3  # type: ignore[import]
    import botocore  # type: ignore[import]
    HAVE_BOTO3 = True

            

Reported by Pylint.

Redefining built-in 'object'
Error

Line: 96 Column: 49

                  return S3_RESOURCE_READ_ONLY.Bucket(bucket_name)


def get_S3_object_from_bucket(bucket_name: str, object: str) -> Any:
    return S3_RESOURCE.Object(bucket_name, object)


def case_status(case: Version1Case) -> Status:
    for k in {'errored', 'failed', 'skipped'}:

            

Reported by Pylint.
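
The parameter name object shadows the builtin. A minimal sketch of the rename (object_name is an arbitrary replacement, and it assumes callers pass the argument positionally):

def get_S3_object_from_bucket(bucket_name: str, object_name: str) -> Any:
    return S3_RESOURCE.Object(bucket_name, object_name)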

Use lazy % formatting in logging functions
Error

Line: 210 Column: 9

                  commit_index = 0
    while len(reports) == 0 and commit_index < len(commits):
        commit = commits[commit_index]
        logger.info(f'Grabbing reports from commit: {commit}')
        summaries = get_test_stats_summaries_for_job(sha=commit, job_prefix=ci_job_prefix)
        for job_name, summary in summaries.items():
            reports.append(summary[0])
            if len(summary) > 1:
                logger.warning(f'WARNING: Multiple summary objects found for {commit}/{job_name}')

            

Reported by Pylint.
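
Pylint prefers handing the values to the logging call so interpolation only happens when the record is actually emitted. A minimal sketch of the rewrite for the two calls shown in this and the following report:

logger.info('Grabbing reports from commit: %s', commit)
logger.warning('WARNING: Multiple summary objects found for %s/%s', commit, job_name)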

Use lazy % formatting in logging functions
Error

Line: 215 Column: 17

                      for job_name, summary in summaries.items():
            reports.append(summary[0])
            if len(summary) > 1:
                logger.warning(f'WARNING: Multiple summary objects found for {commit}/{job_name}')
        commit_index += 1
    return reports


def get_previous_reports_for_pr(pr: str, ci_job_prefix: str = "") -> List[Tuple[Report, str]]:

            

Reported by Pylint.

Use lazy % formatting in logging functions
Error

Line: 222 Column: 5

              
def get_previous_reports_for_pr(pr: str, ci_job_prefix: str = "") -> List[Tuple[Report, str]]:
    reports: List[Tuple[Report, str]] = []
    logger.info(f'Grabbing reports from PR: {[pr]}')
    summaries = get_test_stats_summaries_for_pr(pr=pr, job_prefix=ci_job_prefix)
    for _, summary in summaries.items():
        reports.extend(summary)
    # sort by summary_timestamp
    reports.sort(reverse=True, key=lambda s: s[1])

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

import bz2
import json
import logging
import subprocess
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple, Union, Any, cast
from typing_extensions import Literal, TypedDict


            

Reported by Pylint.

Consider possible security implications associated with subprocess module.
Security blacklist

Line: 4
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess

import bz2
import json
import logging
import subprocess
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple, Union, Any, cast
from typing_extensions import Literal, TypedDict


            

Reported by Bandit.
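
Bandit's B404 is advisory: it flags the import so the actual subprocess usage gets reviewed. A minimal sketch of the pattern that usually satisfies that review, with a fixed argument list and no shell involved (the git command is purely illustrative, not taken from this file):

import subprocess

result = subprocess.run(
    ["git", "rev-parse", "HEAD"],   # fixed argv, no user-controlled string, no shell=True
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout.strip())

If the call sites have been reviewed, recent Bandit releases also accept a targeted "# nosec B404" comment on the import line.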

Missing class docstring
Error

Line: 27 Column: 1

Status = Optional[Literal['errored', 'failed', 'skipped']]


class CaseMeta(TypedDict):
    seconds: float


class Version1Case(CaseMeta):
    name: str

            

Reported by Pylint.

Too few public methods (0/2)
Error

Line: 27 Column: 1

Status = Optional[Literal['errored', 'failed', 'skipped']]


class CaseMeta(TypedDict):
    seconds: float


class Version1Case(CaseMeta):
    name: str

            

Reported by Pylint.
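
TypedDict subclasses are data declarations, so the public-method heuristic does not really apply to them. Assuming the classes stay as plain TypedDicts, a targeted suppression is the usual answer:

from typing_extensions import TypedDict


class CaseMeta(TypedDict):  # pylint: disable=too-few-public-methods
    seconds: float

Lowering min-public-methods in the Pylint configuration is the project-wide alternative, at the cost of loosening the check everywhere.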

Missing class docstring
Error

Line: 31 Column: 1

                  seconds: float


class Version1Case(CaseMeta):
    name: str
    errored: bool
    failed: bool
    skipped: bool


            

Reported by Pylint.