The following issues were found:

caffe2/python/workspace.py
226 issues
Unable to import 'google.protobuf.message'
Error

Line: 9 Column: 1

              
import collections
import contextlib
from google.protobuf.message import Message
from multiprocessing import Process
import os
from collections import defaultdict
import logging
import numpy as np

            

Reported by Pylint.
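
Findings of this kind usually mean that the interpreter running Pylint cannot resolve the package, not that the import in the source file is wrong. A minimal triage sketch, assuming the check is run from the same environment as the linter (the helper function is illustrative; only the module names come from the findings):

import importlib.util

def resolvable(name: str) -> bool:
    # True when the linting interpreter can locate the module on its path.
    try:
        return importlib.util.find_spec(name) is not None
    except ModuleNotFoundError:
        return False

for mod in ("google.protobuf.message", "numpy"):
    print(mod, "importable:", resolvable(mod))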

Module 'caffe2.python._import_c_extension' has no 'blobs' member
Error

Line: 28 Column: 9

              
logger = logging.getLogger(__name__)

Blobs = C.blobs
ResetBlob = C.reset_blob
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'reset_blob' member
Error

Line: 29 Column: 13

logger = logging.getLogger(__name__)

Blobs = C.blobs
ResetBlob = C.reset_blob
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'create_blob' member
Error

Line: 30 Column: 14

              
Blobs = C.blobs
ResetBlob = C.reset_blob
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'current_workspace' member
Error

Line: 31 Column: 20

Blobs = C.blobs
ResetBlob = C.reset_blob
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'deserialize_blob' member
Error

Line: 32 Column: 19

ResetBlob = C.reset_blob
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'global_init' member
Error

Line: 33 Column: 14

CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace
RootFolder = C.root_folder

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'has_blob' member
Error

Line: 34 Column: 11

CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace
RootFolder = C.root_folder
Workspaces = C.workspaces

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'registered_operators' member
Error

Line: 35 Column: 23

DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace
RootFolder = C.root_folder
Workspaces = C.workspaces
BenchmarkNet = C.benchmark_net

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'serialize_blob' member
Error

Line: 36 Column: 17

GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace
RootFolder = C.root_folder
Workspaces = C.workspaces
BenchmarkNet = C.benchmark_net
BenchmarkNetOnce = C.benchmark_net_once

            

Reported by Pylint.
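
All of the no-member findings above point at caffe2.python._import_c_extension, whose attributes are created by the compiled extension at import time, so Pylint cannot discover them statically. A hedged sketch of a line-scoped suppression that mirrors the aliasing pattern in workspace.py (it is not the actual file and needs a built caffe2 extension to run):

import caffe2.python._import_c_extension as C

# These attributes exist only after the C extension loads, so no-member is
# disabled for the aliasing lines alone and stays active elsewhere.
Blobs = C.blobs                          # pylint: disable=no-member
CreateBlob = C.create_blob               # pylint: disable=no-member
CurrentWorkspace = C.current_workspace   # pylint: disable=no-member

Project-wide, the same effect is often achieved by listing the extension module under Pylint's extension-pkg-allow-list (older releases: extension-pkg-whitelist) or ignored-modules options instead of per-line comments.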

test/test_tensorboard.py
225 issues
Unable to import 'torch'
Error

Line: 41 Column: 1

    TEST_MATPLOTLIB = False
skipIfNoMatplotlib = unittest.skipIf(not TEST_MATPLOTLIB, "no matplotlib")

import torch
from torch.testing._internal.common_utils import TestCase, run_tests, TEST_WITH_ASAN

def tensor_N(shape, dtype=float):
    numel = np.prod(shape)
    x = (np.arange(numel, dtype=dtype)).reshape(shape)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 42 Column: 1

skipIfNoMatplotlib = unittest.skipIf(not TEST_MATPLOTLIB, "no matplotlib")

import torch
from torch.testing._internal.common_utils import TestCase, run_tests, TEST_WITH_ASAN

def tensor_N(shape, dtype=float):
    numel = np.prod(shape)
    x = (np.arange(numel, dtype=dtype)).reshape(shape)
    return x

            

Reported by Pylint.

Unable to import 'tensorboard.compat.proto.graph_pb2'
Error

Line: 70 Column: 5

              

if TEST_TENSORBOARD:
    from tensorboard.compat.proto.graph_pb2 import GraphDef
    from torch.utils.tensorboard import summary, SummaryWriter
    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph

            

Reported by Pylint.

Unable to import 'torch.utils.tensorboard'
Error

Line: 71 Column: 5

              
if TEST_TENSORBOARD:
    from tensorboard.compat.proto.graph_pb2 import GraphDef
    from torch.utils.tensorboard import summary, SummaryWriter
    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format

            

Reported by Pylint.

Unable to import 'torch.utils.tensorboard._utils'
Error

Line: 72 Column: 5

if TEST_TENSORBOARD:
    from tensorboard.compat.proto.graph_pb2 import GraphDef
    from torch.utils.tensorboard import summary, SummaryWriter
    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format
    from PIL import Image

            

Reported by Pylint.

Unable to import 'torch.utils.tensorboard._convert_np'
Error

Line: 73 Column: 5

    from tensorboard.compat.proto.graph_pb2 import GraphDef
    from torch.utils.tensorboard import summary, SummaryWriter
    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format
    from PIL import Image


            

Reported by Pylint.

Unable to import 'torch.utils.tensorboard'
Error

Line: 74 Column: 5

    from torch.utils.tensorboard import summary, SummaryWriter
    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format
    from PIL import Image

class TestTensorBoardPyTorchNumpy(BaseTestCase):

            

Reported by Pylint.

Unable to import 'torch.utils.tensorboard._pytorch_graph'
Error

Line: 75 Column: 5

    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format
    from PIL import Image

class TestTensorBoardPyTorchNumpy(BaseTestCase):
    def test_pytorch_np(self):

            

Reported by Pylint.

Unable to import 'google.protobuf'
Error

Line: 76 Column: 5

    from torch.utils.tensorboard._convert_np import make_np
    from torch.utils.tensorboard import _caffe2_graph as c2_graph
    from torch.utils.tensorboard._pytorch_graph import graph
    from google.protobuf import text_format
    from PIL import Image

class TestTensorBoardPyTorchNumpy(BaseTestCase):
    def test_pytorch_np(self):
        tensors = [torch.rand(3, 10, 10), torch.rand(1), torch.rand(1, 2, 3, 4, 5)]

            

Reported by Pylint.

Unused import tensorboard.summary.writer.event_file_writer
Error

Line: 11 Column: 5

              
TEST_TENSORBOARD = True
try:
    import tensorboard.summary.writer.event_file_writer  # noqa: F401
    from tensorboard.compat.proto.summary_pb2 import Summary
except ImportError:
    TEST_TENSORBOARD = False

HAS_TORCHVISION = True

            

Reported by Pylint.
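
The unused-import finding fires even though the line already carries "# noqa: F401": Pylint does not read flake8's noqa markers, so each tool needs its own suppression. The "Unable to import" findings in this file are the same environment issue described for workspace.py. A hedged sketch of the availability-probe pattern with a Pylint-scoped pragma (the structure is illustrative; only the module names come from the test):

TEST_TENSORBOARD = True
try:
    # The import exists only to probe availability; the pragma below scopes
    # the unused-import suppression to this try block.
    # pylint: disable=unused-import
    import tensorboard.summary.writer.event_file_writer  # noqa: F401
except ImportError:
    TEST_TENSORBOARD = False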

test/jit/test_torchbind.py
224 issues
Unable to import 'torch'
Error

Line: 7 Column: 1

import copy
import unittest

import torch
from typing import Optional

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.jit_utils'
Error

Line: 13 Column: 1

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.testing._internal.common_utils import (
    IS_FBCODE,
    IS_MACOS,
    IS_SANDCASTLE,
    IS_WINDOWS,

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 14 Column: 1

pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
from torch.testing._internal.common_utils import (
    IS_FBCODE,
    IS_MACOS,
    IS_SANDCASTLE,
    IS_WINDOWS,
    find_library_location,

            

Reported by Pylint.

Unable to import 'torch.testing'
Error

Line: 21 Column: 1

    IS_WINDOWS,
    find_library_location,
)
from torch.testing import FileCheck

if __name__ == "__main__":
    raise RuntimeError(
        "This test file is not meant to be run directly, use:\n\n"
        "\tpython test/test_jit.py TESTNAME\n\n"

            

Reported by Pylint.

function already defined line 43
Error

Line: 53 Column: 9

            val = torch.classes._TorchScriptTesting._Foo(5, 3)
            val.increment('foo')

        def f():
            ss = torch.classes._TorchScriptTesting._StackString(["asdf", "bruh"])
            return ss.pop()
        test_equality(f, lambda x: x)

        def f():

            

Reported by Pylint.

function already defined line 43
Error

Line: 58 Column: 9

            return ss.pop()
        test_equality(f, lambda x: x)

        def f():
            ss1 = torch.classes._TorchScriptTesting._StackString(["asdf", "bruh"])
            ss2 = torch.classes._TorchScriptTesting._StackString(["111", "222"])
            ss1.push(ss2.pop())
            return ss1.pop() + ss2.pop()
        test_equality(f, lambda x: x)

            

Reported by Pylint.

Access to a protected member _Foo of a client class
Error

Line: 44 Column: 19

            return (cmp_key(obj1), cmp_key(obj2))

        def f():
            val = torch.classes._TorchScriptTesting._Foo(5, 3)
            val.increment(1)
            return val
        test_equality(f, lambda x: x)

        with self.assertRaisesRegex(RuntimeError, "Expected a value of type 'int'"):

            

Reported by Pylint.

Access to a protected member _TorchScriptTesting of a client class
Error

Line: 44 Column: 19

            return (cmp_key(obj1), cmp_key(obj2))

        def f():
            val = torch.classes._TorchScriptTesting._Foo(5, 3)
            val.increment(1)
            return val
        test_equality(f, lambda x: x)

        with self.assertRaisesRegex(RuntimeError, "Expected a value of type 'int'"):

            

Reported by Pylint.

Access to a protected member _TorchScriptTesting of a client class
Error

Line: 50 Column: 19

        test_equality(f, lambda x: x)

        with self.assertRaisesRegex(RuntimeError, "Expected a value of type 'int'"):
            val = torch.classes._TorchScriptTesting._Foo(5, 3)
            val.increment('foo')

        def f():
            ss = torch.classes._TorchScriptTesting._StackString(["asdf", "bruh"])
            return ss.pop()

            

Reported by Pylint.

Access to a protected member _Foo of a client class
Error

Line: 50 Column: 19

        test_equality(f, lambda x: x)

        with self.assertRaisesRegex(RuntimeError, "Expected a value of type 'int'"):
            val = torch.classes._TorchScriptTesting._Foo(5, 3)
            val.increment('foo')

        def f():
            ss = torch.classes._TorchScriptTesting._StackString(["asdf", "bruh"])
            return ss.pop()

            

Reported by Pylint.
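
The function-already-defined findings arise because the test reuses the closure name f for several throwaway functions, and the protected-member findings flag deliberate use of the _TorchScriptTesting test bindings. A hedged, self-contained sketch of the two usual responses, with plain stand-in functions instead of the real torch.classes calls:

def test_equality(fn, cmp_key):
    # Stand-in for the helper used in the test.
    return cmp_key(fn())

# Option 1: give each throwaway closure its own name, so nothing is redefined.
def build_foo():
    return 5

def pop_string():
    return "bruh"

print(test_equality(build_foo, lambda x: x))
print(test_equality(pop_string, lambda x: x))

# Option 2: keep the repeated name (or the _protected access) and record the
# intent with a scoped pragma.
def f():
    return 1

def f():  # pylint: disable=function-redefined
    return 2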

caffe2/python/caffe_translator.py
224 issues
No name 'caffe2_legacy_pb2' in module 'caffe2.proto'
Error

Line: 10 Column: 1

import re
import numpy as np  # noqa

from caffe2.proto import caffe2_pb2, caffe2_legacy_pb2
from caffe.proto import caffe_pb2
from caffe2.python import core, utils, workspace
from google.protobuf import text_format

logging.basicConfig()

            

Reported by Pylint.

Unable to import 'caffe.proto'
Error

Line: 11 Column: 1

import numpy as np  # noqa

from caffe2.proto import caffe2_pb2, caffe2_legacy_pb2
from caffe.proto import caffe_pb2
from caffe2.python import core, utils, workspace
from google.protobuf import text_format

logging.basicConfig()
log = logging.getLogger("caffe_translator")

            

Reported by Pylint.

Unable to import 'google.protobuf'
Error

Line: 13 Column: 1

from caffe2.proto import caffe2_pb2, caffe2_legacy_pb2
from caffe.proto import caffe_pb2
from caffe2.python import core, utils, workspace
from google.protobuf import text_format

logging.basicConfig()
log = logging.getLogger("caffe_translator")
log.setLevel(logging.INFO)


            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Workspace' member
Error

Line: 52 Column: 10

              
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
    dim_map = {}
    ws = workspace.C.Workspace()
    for param in net_params.protos:
        ws.create_blob(param.name) \
            .feed(utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    ws.create_blob(external_input).feed(dummy_input)

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Workspace' member
Error

Line: 127 Column: 14

              
        # Running with the legacy pad argument removed
        # compare the dimensions and adjust pad argument when necessary
        ws = workspace.C.Workspace()

        external_input = net.op[0].input[0]
        ws.create_blob(external_input).feed_blob(dummy_input)
        for param in net_params.protos:
            ws.create_blob(param.name) \

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Workspace' member
Error

Line: 167 Column: 10

              
def _GetBlobDimMap(net, net_params, dummy_input):
    dim_map = {}
    ws = workspace.C.Workspace()
    for param in net_params.protos:
        ws.create_blob(param.name) \
          .feed(utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    ws.create_blob(external_input).feed(dummy_input)

            

Reported by Pylint.

Redefining name 'net' from outer scope (line 917)
Error

Line: 50 Column: 20

                  return ret


def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
    dim_map = {}
    ws = workspace.C.Workspace()
    for param in net_params.protos:
        ws.create_blob(param.name) \
            .feed(utils.Caffe2TensorToNumpyArray(param))

            

Reported by Pylint.

Redefining name 'external_input' from outer scope (line 924)
Error

Line: 56 Column: 5

    for param in net_params.protos:
        ws.create_blob(param.name) \
            .feed(utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    ws.create_blob(external_input).feed(dummy_input)
    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        ws._run_operator(op_def.SerializeToString())

            

Reported by Pylint.

Access to a protected member _run_operator of a client class
Error

Line: 61 Column: 9

    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        ws._run_operator(op_def.SerializeToString())
        if i in legacy_pad_ops:
            output = op_def.output[0]
            blob_legacy = ws.fetch_blob(output)
            dim_map[i] = blob_legacy.shape
    return dim_map

            

Reported by Pylint.

Unused argument 'op_def'
Error

Line: 69 Column: 23

    return dim_map


def _GetLegacyPadArgs(op_def, arg_map):
    pads = {}
    keys = ['pad_l', 'pad_t', 'pad_r', 'pad_b']
    is_pad = 'pad' in arg_map
    if is_pad:
        for k in keys:

            

Reported by Pylint.
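
"Redefining name 'net' from outer scope" means the function parameters shadow module-level variables created near line 917 of the translator; renaming either side removes the ambiguity, while ws._run_operator is a deliberate use of a private workspace API that is normally acknowledged with a protected-access pragma. A small hedged sketch of the shadowing fix (the data structures are illustrative, not the translator's real objects):

# Module-level script variables, as in the translator's command-line block.
net = None
external_input = None

def _get_legacy_dims(pred_net, net_params, dummy_input):
    # Parameter renamed from "net" to "pred_net", so it no longer shadows the
    # module-level "net" and redefined-outer-name is not reported.
    first_input = pred_net["op"][0]["input"][0]
    return {"params": len(net_params), "input": first_input, "dummy": dummy_input}

print(_get_legacy_dims({"op": [{"input": ["data"]}]}, [1, 2], 0))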

android/pytorch_android/src/androidTest/java/org/pytorch/PytorchTestBase.java
223 issues
This class has too many methods, consider refactoring it.
Design

Line: 14

import java.util.Map;
import org.junit.Test;

public abstract class PytorchTestBase {
  private static final String TEST_MODULE_ASSET_NAME = "test.pt";

  @Test
  public void testForwardNull() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));

            

Reported by PMD.

Unit tests should not contain more than 1 assert(s).
Design

Line: 18

  private static final String TEST_MODULE_ASSET_NAME = "test.pt";

  @Test
  public void testForwardNull() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    final IValue input = IValue.from(Tensor.fromBlob(Tensor.allocateByteBuffer(1), new long[] {1}));
    assertTrue(input.isTensor());
    final IValue output = module.forward(input);
    assertTrue(output.isNull());

            

Reported by PMD.

JUnit assertions should include a message
Design

Line: 21

  public void testForwardNull() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    final IValue input = IValue.from(Tensor.fromBlob(Tensor.allocateByteBuffer(1), new long[] {1}));
    assertTrue(input.isTensor());
    final IValue output = module.forward(input);
    assertTrue(output.isNull());
  }

  @Test

            

Reported by PMD.

Potential violation of Law of Demeter (object not created locally)
Design

Line: 22

    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    final IValue input = IValue.from(Tensor.fromBlob(Tensor.allocateByteBuffer(1), new long[] {1}));
    assertTrue(input.isTensor());
    final IValue output = module.forward(input);
    assertTrue(output.isNull());
  }

  @Test
  public void testEqBool() throws IOException {

            

Reported by PMD.

JUnit assertions should include a message
Design

Line: 23

    final IValue input = IValue.from(Tensor.fromBlob(Tensor.allocateByteBuffer(1), new long[] {1}));
    assertTrue(input.isTensor());
    final IValue output = module.forward(input);
    assertTrue(output.isNull());
  }

  @Test
  public void testEqBool() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));

            

Reported by PMD.

Potential violation of Law of Demeter (object not created locally)
Design

Line: 23

    final IValue input = IValue.from(Tensor.fromBlob(Tensor.allocateByteBuffer(1), new long[] {1}));
    assertTrue(input.isTensor());
    final IValue output = module.forward(input);
    assertTrue(output.isNull());
  }

  @Test
  public void testEqBool() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));

            

Reported by PMD.

Unit tests should not contain more than 1 assert(s).
Design

Line: 27

  }

  @Test
  public void testEqBool() throws IOException {
    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    for (boolean value : new boolean[] {false, true}) {
      final IValue input = IValue.from(value);
      assertTrue(input.isBool());
      assertTrue(value == input.toBool());

            

Reported by PMD.

JUnit assertions should include a message
Design

Line: 31

    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    for (boolean value : new boolean[] {false, true}) {
      final IValue input = IValue.from(value);
      assertTrue(input.isBool());
      assertTrue(value == input.toBool());
      final IValue output = module.runMethod("eqBool", input);
      assertTrue(output.isBool());
      assertTrue(value == output.toBool());
    }

            

Reported by PMD.

Potential violation of Law of Demeter (object not created locally)
Design

Line: 31

    final Module module = Module.load(assetFilePath(TEST_MODULE_ASSET_NAME));
    for (boolean value : new boolean[] {false, true}) {
      final IValue input = IValue.from(value);
      assertTrue(input.isBool());
      assertTrue(value == input.toBool());
      final IValue output = module.runMethod("eqBool", input);
      assertTrue(output.isBool());
      assertTrue(value == output.toBool());
    }

            

Reported by PMD.

Potential violation of Law of Demeter (object not created locally)
Design

Line: 32

    for (boolean value : new boolean[] {false, true}) {
      final IValue input = IValue.from(value);
      assertTrue(input.isBool());
      assertTrue(value == input.toBool());
      final IValue output = module.runMethod("eqBool", input);
      assertTrue(output.isBool());
      assertTrue(value == output.toBool());
    }
  }

            

Reported by PMD.

test/distributed/test_data_parallel.py
222 issues
Unable to import 'torch'
Error

Line: 8 Column: 1

from itertools import product
import functools

import torch
from torch import nn
from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 9 Column: 1

import functools

import torch
from torch import nn
from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck

            

Reported by Pylint.

Unable to import 'torch.cuda.amp'
Error

Line: 10 Column: 1

              
import torch
from torch import nn
from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE

            

Reported by Pylint.

Unable to import 'torch.nn.parallel'
Error

Line: 11 Column: 1

import torch
from torch import nn
from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_cuda'
Error

Line: 12 Column: 1

from torch import nn
from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 13 Column: 1

from torch.cuda.amp import autocast
import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F


            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 14 Column: 1

import torch.nn.parallel as dp
from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F

torch.set_default_dtype(torch.double)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 15 Column: 1

from torch.testing._internal.common_cuda import TEST_MULTIGPU, TEST_CUDA
from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F

torch.set_default_dtype(torch.double)


            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 16 Column: 1

from torch.testing._internal.common_utils import run_tests, TestCase, repeat_test_for_types, ALL_TENSORTYPES
from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F

torch.set_default_dtype(torch.double)

NO_NCCL = not hasattr(torch.distributed, "ProcessGroupNCCL")

            

Reported by Pylint.

Unable to import 'torch.nn.functional'
Error

Line: 17 Column: 1

from torch.testing._internal.common_utils import _assertGradAndGradgradChecks, gradcheck
from torch.testing._internal.common_utils import dtype2prec_DONTUSE
from torch.testing._internal.common_utils import sandcastle_skip_if
import torch.nn.functional as F

torch.set_default_dtype(torch.double)

NO_NCCL = not hasattr(torch.distributed, "ProcessGroupNCCL")


            

Reported by Pylint.
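
Every finding in this file is the same "Unable to import" error for torch submodules, which again reflects the environment Pylint runs in rather than the test itself. When installing torch into that environment is not an option, the modules can be declared as ignored; a hedged sketch of a programmatic run with that setting, assuming a reasonably recent Pylint (the target path and option values are illustrative):

from pylint import lint

lint.Run(
    [
        "--ignored-modules=torch,torch.cuda,torch.testing",
        "test/distributed/test_data_parallel.py",
    ],
    exit=False,  # report findings without terminating the interpreter
)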

torch/cuda/__init__.py
220 issues
Unable to import '__init__._utils'
Error

Line: 18 Column: 1

import warnings
import threading
from typing import List, Optional, Tuple, Union, Any
from ._utils import _get_device_index, _dummy_type
from .streams import Stream, Event, _Graph, _graph_pool_handle
from .. import device as _device
import torch._C

try:

            

Reported by Pylint.

Unable to import '__init__.streams'
Error

Line: 19 Column: 1

import threading
from typing import List, Optional, Tuple, Union, Any
from ._utils import _get_device_index, _dummy_type
from .streams import Stream, Event, _Graph, _graph_pool_handle
from .. import device as _device
import torch._C

try:
    from torch._C import _cudart  # type: ignore[attr-defined]

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 20 Column: 1

from typing import List, Optional, Tuple, Union, Any
from ._utils import _get_device_index, _dummy_type
from .streams import Stream, Event, _Graph, _graph_pool_handle
from .. import device as _device
import torch._C

try:
    from torch._C import _cudart  # type: ignore[attr-defined]
except ImportError:

            

Reported by Pylint.

Module 'torch' has no 'version' member
Error

Line: 100 Column: 8

    The minimum cuda capability supported by this library is %d.%d.
    """

    if torch.version.cuda is not None:  # on ROCm we don't want this check
        CUDA_VERSION = torch._C._cuda_getCompiledVersion()
        for d in range(device_count()):
            capability = get_device_capability(d)
            major = capability[0]
            minor = capability[1]

            

Reported by Pylint.

Module 'torch' has no 'version' member
Error

Line: 120 Column: 8

The current PyTorch install supports CUDA capabilities {}.
If you want to use the {} GPU with PyTorch, please check the instructions at https://pytorch.org/get-started/locally/
"""
    if torch.version.cuda is None:  # on ROCm we don't want this check
        return
    arch_list = get_arch_list()
    if len(arch_list) == 0:
        return
    supported_sm = [int(arch.split('_')[1]) for arch in arch_list if 'sm_' in arch]

            

Reported by Pylint.

Undefined variable '_get_device_properties'
Error

Line: 350 Column: 12

    device = _get_device_index(device, optional=True)
    if device < 0 or device >= device_count():
        raise AssertionError("Invalid device id")
    return _get_device_properties(device)  # type: ignore[name-defined]

def can_device_access_peer(device: _device_t, peer_device: _device_t) -> bool:
    r"""Checks if peer access between two devices is possible.
    """
    _lazy_init()

            

Reported by Pylint.

Unable to import '__init__.memory'
Error

Line: 564 Column: 1

    return torch._C._cuda_get_sync_debug_mode()


from .memory import *  # noqa: F403


from .random import *  # noqa: F403

################################################################################

            

Reported by Pylint.

Unable to import '__init__.random'
Error

Line: 567 Column: 1

from .memory import *  # noqa: F403


from .random import *  # noqa: F403

################################################################################
# Define Storage and Tensor classes
################################################################################


            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 574 Column: 1

################################################################################


from ..storage import _StorageBase


if not hasattr(torch._C, 'CudaDoubleStorageBase'):
    # Define dummy base classes
    for t in ['Double', 'Float', 'Long', 'Int', 'Short', 'Char', 'Byte', 'Half', 'Bool', 'BFloat16',

            

Reported by Pylint.

Instance of '_CudaBase' has no 'get_device' member
Error

Line: 607 Column: 21

        # We could use a Protocol here to tell mypy that self has `get_device` method
        # but it is only available in the typing module on Python >= 3.8
        # or on typing_extensions module on Python >= 3.6
        with device(self.get_device()):  # type: ignore[attr-defined]
            return super(_CudaBase, self).type(*args, **kwargs)  # type: ignore[misc]

    __new__ = _lazy_new



            

Reported by Pylint.
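
The "Unable to import '__init__._utils'" and "beyond top-level package" findings indicate the file was linted by path, so Pylint treats it as a stray module named __init__ instead of as torch.cuda; linting the package by name restores the relative imports. The "Module 'torch' has no 'version' member" findings are the lazy-submodule pattern, for which an explicit submodule import gives the checker something to resolve. A hedged illustration of the latter, not the actual torch/cuda code:

import torch
import torch.version  # explicit submodule import; resolves the "no 'version' member" report

def compiled_cuda_version():
    # None on CPU-only or ROCm builds, a version string otherwise.
    return torch.version.cuda

print(compiled_cuda_version())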

torch/fx/experimental/fx2trt/converters/acc_ops_converters.py
220 issues
Unable to import 'tensorrt'
Error

Line: 8 Column: 1

import torch.fx.experimental.fx_acc.acc_ops as acc_ops
import torch.fx.experimental.fx_acc.acc_utils as acc_utils
import numpy as np
import tensorrt as trt
import torch
from torch.fx.experimental.fx2trt.fx2trt import (
    tensorrt_converter,
    torch_dtype_from_trt,
    get_dynamic_dims,

            

Reported by Pylint.

Module 'torch' has no 'IntTensor' member
Error

Line: 55 Column: 18

                      and rely on broadcasting to expand the dimensions as needed
    """
    if isinstance(tensor, int):
        tensor = torch.IntTensor([tensor])

    if isinstance(tensor, float):
        tensor = torch.Tensor([tensor])

    shape = tuple(tensor.shape)

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 382 Column: 20

              
    if not has_dynamic_shape(input_val.shape):
        if network.has_implicit_batch_dimension:
            return torch.Size((IMPLICIT_BATCH_DIM,) + tuple(input_val.shape))
        return torch.Size(input_val.shape)

    layer = network.add_shape(input_val)
    layer.name = name
    return layer.get_output(0)

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 383 Column: 16

    if not has_dynamic_shape(input_val.shape):
        if network.has_implicit_batch_dimension:
            return torch.Size((IMPLICIT_BATCH_DIM,) + tuple(input_val.shape))
        return torch.Size(input_val.shape)

    layer = network.add_shape(input_val)
    layer.name = name
    return layer.get_output(0)


            

Reported by Pylint.

Module 'torch' has no 'ones' member
Error

Line: 875 Column: 11

    acc_ops_clamp_shape = (1,) * len(input.shape)  # broadcast all dimensions
    acc_ops_clamp_tensor = (
        val
        * torch.ones(acc_ops_clamp_shape, dtype=torch_dtype_from_trt(input.dtype))
        .cpu()
        .numpy()
    )
    acc_ops_clamp_trt = network.add_constant(acc_ops_clamp_shape, acc_ops_clamp_tensor)
    layer = network.add_elementwise(input, acc_ops_clamp_trt.get_output(0), op)

            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 1121 Column: 36

    q_scale = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_scale")
    q_zero_point = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_zero_point")
    dtype = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "dtype")
    if dtype not in (torch.quint8, torch.qint8, torch.qint32):
        raise RuntimeError("Only support (torch.quint8, torch.qint8, torch.qint32) "
                           f"quantized type in quantize_per_tensor, get {dtype}.")

    if q_zero_point != 0:
        raise RuntimeError(f"Only support zero_point == 0, get {q_zero_point}")

            

Reported by Pylint.

Module 'torch' has no 'qint32' member
Error

Line: 1121 Column: 49

    q_scale = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_scale")
    q_zero_point = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_zero_point")
    dtype = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "dtype")
    if dtype not in (torch.quint8, torch.qint8, torch.qint32):
        raise RuntimeError("Only support (torch.quint8, torch.qint8, torch.qint32) "
                           f"quantized type in quantize_per_tensor, get {dtype}.")

    if q_zero_point != 0:
        raise RuntimeError(f"Only support zero_point == 0, get {q_zero_point}")

            

Reported by Pylint.

Module 'torch' has no 'quint8' member
Error

Line: 1121 Column: 22

    q_scale = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_scale")
    q_zero_point = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "q_zero_point")
    dtype = acc_utils.get_field_from_acc_out_ty(kwargs["acc_out_ty"], "dtype")
    if dtype not in (torch.quint8, torch.qint8, torch.qint32):
        raise RuntimeError("Only support (torch.quint8, torch.qint8, torch.qint32) "
                           f"quantized type in quantize_per_tensor, get {dtype}.")

    if q_zero_point != 0:
        raise RuntimeError(f"Only support zero_point == 0, get {q_zero_point}")

            

Reported by Pylint.

Module 'torch' has no 'qint8' member
Error

Line: 1157 Column: 36

    q_zero_point = acc_utils.get_field_from_acc_out_ty(kwargs["input_tensor_meta"], "q_zero_point")
    dtype = acc_utils.get_field_from_acc_out_ty(kwargs["input_tensor_meta"], "dtype")

    if dtype not in (torch.quint8, torch.qint8, torch.qint32):
        raise RuntimeError("Only support (torch.quint8, torch.qint8, torch.qint32) "
                           f"quantized type in dequantize, get {dtype}.")

    if q_zero_point != 0:
        raise RuntimeError(f"Only support zero_point == 0, get {q_zero_point}")

            

Reported by Pylint.

Module 'torch' has no 'quint8' member
Error

Line: 1157 Column: 22

    q_zero_point = acc_utils.get_field_from_acc_out_ty(kwargs["input_tensor_meta"], "q_zero_point")
    dtype = acc_utils.get_field_from_acc_out_ty(kwargs["input_tensor_meta"], "dtype")

    if dtype not in (torch.quint8, torch.qint8, torch.qint32):
        raise RuntimeError("Only support (torch.quint8, torch.qint8, torch.qint32) "
                           f"quantized type in dequantize, get {dtype}.")

    if q_zero_point != 0:
        raise RuntimeError(f"Only support zero_point == 0, get {q_zero_point}")

            

Reported by Pylint.
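
The "Module 'torch' has no ... member" findings here are the same class of false positive: torch.ones, torch.Size, torch.IntTensor and the quantized dtypes are installed by the C++ extension, so static analysis cannot see them. A hedged sketch of a line-scoped suppression that keeps the check active for everything else (the tensor code is illustrative and assumes torch is installed):

import torch

x = torch.ones(2, 3)                                    # pylint: disable=no-member
quantized = (torch.quint8, torch.qint8, torch.qint32)   # pylint: disable=no-member
print(x.dtype in quantized, tuple(x.shape))

Repository-wide, maintainers often prefer Pylint's generated-members or extension-pkg-allow-list settings so the pragma is not needed at every call site.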

torch/testing/_internal/common_device_type.py
220 issues
Module 'torch' has no 'ones' member
Error

Line: 454 Column: 13

    @classmethod
    def setUpClass(cls):
        # has_magma shows up after cuda is initialized
        t = torch.ones(1).cuda()
        cls.no_magma = not torch.cuda.has_magma

        # Determines if cuDNN is available and its version
        cls.no_cudnn = not torch.backends.cudnn.is_acceptable(t)
        cls.cudnn_version = None if cls.no_cudnn else torch.backends.cudnn.version()

            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 714 Column: 52

              
class ops(_TestParametrizer):
    def __init__(self, op_list, *, dtypes: OpDTypes = OpDTypes.basic,
                 allowed_dtypes: Optional[Sequence[torch.dtype]] = None):
        self.op_list = op_list
        self.opinfo_dtypes = dtypes
        self.allowed_dtypes = set(allowed_dtypes) if allowed_dtypes is not None else None

    def _parametrize_test(self, test, generic_cls, device_cls):

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 825 Column: 8

        super().__init__(dep, reason, device_type='meta')

def _has_sufficient_memory(device, size):
    if torch.device(device).type == 'cuda':
        if not torch.cuda.is_available():
            return False
        gc.collect()
        torch.cuda.empty_cache()
        return torch.cuda.get_device_properties(device).total_memory - torch.cuda.memory_allocated(device) >= size

            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 975 Column: 38

    def __init__(self, d):
        assert isinstance(d, dict), "precisionOverride not given a dtype : precision dict!"
        for dtype, prec in d.items():
            assert isinstance(dtype, torch.dtype), "precisionOverride given unknown dtype {0}".format(dtype)

        self.d = d

    def __call__(self, fn):
        fn.precision_overrides = self.d

            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 1006 Column: 38

    def __init__(self, d):
        assert isinstance(d, dict), "toleranceOverride not given a dtype : tol dict!"
        for dtype, prec in d.items():
            assert isinstance(dtype, torch.dtype), "toleranceOverride given unknown dtype {0}".format(dtype)
            assert isinstance(prec, tol), "toleranceOverride not given a dtype : tol dict!"

        self.d = d

    def __call__(self, fn):

            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 1034 Column: 46

                                  "When one dtype variant is a tuple or list, " \
                    "all dtype variants must be. " \
                    "Received non-list non-tuple dtype {0}".format(str(arg))
                assert all(isinstance(dtype, torch.dtype) for dtype in arg), "Unknown dtype in {0}".format(str(arg))
        else:
            assert all(isinstance(arg, torch.dtype) for arg in args), "Unknown dtype in {0}".format(str(args))

        self.args = args
        self.device_type = device_type

            

Reported by Pylint.

Module 'torch' has no 'dtype' member
Error

Line: 1036 Column: 40

                                  "Received non-list non-tuple dtype {0}".format(str(arg))
                assert all(isinstance(dtype, torch.dtype) for dtype in arg), "Unknown dtype in {0}".format(str(arg))
        else:
            assert all(isinstance(arg, torch.dtype) for arg in args), "Unknown dtype in {0}".format(str(args))

        self.args = args
        self.device_type = device_type

    def __call__(self, fn):

            

Reported by Pylint.

Redefining name 'dtypes' from outer scope (line 1025)
Error

Line: 260 Column: 24

    return str(dtype).split('.')[1]


def _dtype_test_suffix(dtypes):
    """ Returns the test suffix for a dtype, sequence of dtypes, or None. """
    if isinstance(dtypes, list) or isinstance(dtypes, tuple):
        if len(dtypes) == 0:
            return ''
        return '_' + '_'.join((_dtype_name(d) for d in dtypes))

            

Reported by Pylint.

Unused argument 'name'
Error

Line: 354 Column: 42

    @classmethod
    def instantiate_test(cls, name, test, *, generic_cls=None):

        def instantiate_test_helper(cls, name, *, test, param_kwargs=None):
            # Constructs the test
            @wraps(test)
            def instantiated_test(self, param_kwargs=param_kwargs):
                # Add the device param kwarg if the test needs device or devices.
                param_kwargs = {} if param_kwargs is None else param_kwargs

            

Reported by Pylint.

Access to a protected member _apply_precision_override_for_test of a client class
Error

Line: 372 Column: 21

                guard_precision = self.precision
                guard_rel_tol = self.rel_tol
                try:
                    self._apply_precision_override_for_test(test, param_kwargs)
                    result = test(self, **param_kwargs)
                except RuntimeError as rte:
                    # check if rte should stop entire test suite.
                    self._stop_test_suite = self._should_stop_test_suite()
                    # raise the runtime error as is for the test suite to record.

            

Reported by Pylint.
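
"Unused argument 'name'" and the dtypes shadowing follow the conventions already noted: a parameter that is intentionally unused is conventionally renamed with a leading underscore (Pylint's default ignored-argument-names pattern accepts it), and a parameter that shadows a module-level name simply gets a distinct name. A small hedged sketch with an illustrative signature, not the real decorator machinery:

def instantiate_test(cls, _name, test, *, generic_cls=None):
    # "_name" marks an intentionally unused parameter, so unused-argument
    # stays quiet without a pragma; "del" is the equivalent explicit form.
    del generic_cls
    return (cls, test)

print(instantiate_test(object, "test_foo", lambda self: None))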

test/jit/test_hooks_modules.py
219 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

import torch
from typing import List, Tuple


class SubmoduleNoForwardInputs(torch.nn.Module):
    def __init__(self, name):
        super().__init__()
        self.name = name


            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 29 Column: 19

        super().__init__()
        self.name = name

    def foo(self, input: str):
        return input

    def forward(self, input: str):
        input = input + "_inner_mod"
        input = self.foo(input)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 32 Column: 23

    def foo(self, input: str):
        return input

    def forward(self, input: str):
        input = input + "_inner_mod"
        input = self.foo(input)
        return input



            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 44 Column: 23

        self.name = name
        self.submodule = SubmoduleForwardSingleInput(submodule_name)

    def forward(self, input: str):
        input = input + "_outermod"
        return self.submodule(input)


class ModuleDirectforwardSubmodCall(torch.nn.Module):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 55 Column: 23

        self.name = name
        self.submodule = SubmoduleForwardSingleInput(submodule_name)

    def forward(self, input: str):
        input = input + "_outermod"
        return self.submodule.forward(input)


class SuboduleForwardMultipleInputs(torch.nn.Module):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 87 Column: 23

        super().__init__()
        self.name = name

    def forward(self, input: Tuple[int]):
        input_access = input[0]
        return (1,)


class ModuleForwardTupleInput(torch.nn.Module):

            

Reported by Pylint.

Unused variable 'input_access'
Error

Line: 88 Column: 9

        self.name = name

    def forward(self, input: Tuple[int]):
        input_access = input[0]
        return (1,)


class ModuleForwardTupleInput(torch.nn.Module):
    def __init__(self, name: str, submodule_name: str):

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 98 Column: 23

        self.name = name
        self.submodule = SubmoduleForwardTupleInput(submodule_name)

    def forward(self, input: Tuple[int]):
        input_access = input[0]
        return self.submodule((1,))


# Modules for JIT forward hook and pre-hooks python and cpp tests

            

Reported by Pylint.

Unused variable 'input_access'
Error

Line: 99 Column: 9

        self.submodule = SubmoduleForwardTupleInput(submodule_name)

    def forward(self, input: Tuple[int]):
        input_access = input[0]
        return self.submodule((1,))


# Modules for JIT forward hook and pre-hooks python and cpp tests
def create_module_no_forward_input():

            

Reported by Pylint.

Unused argument 'input'
Error

Line: 108 Column: 24

    # Use to test module level hooks with no forward input
    m = ModuleNoForwardInputs("outer_mod_name", "inner_mod_name")

    def pre_hook(self, input: Tuple[()]) -> None:
        assert self.name == "outer_mod_name"

    def forward_hook(self, input: Tuple[()], output: None):
        assert self.name == "outer_mod_name"


            

Reported by Pylint.
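
"Redefining built-in 'input'" and the unused input_access findings are both naming issues: the forward parameters shadow Python's built-in input(), and the indexing result is never read. A hedged sketch of the usual cleanup, written without the torch.nn.Module base class so it runs standalone:

from typing import Tuple

class SubmoduleForwardTupleInput:
    # Parameter renamed so the built-in input() is no longer shadowed.
    def forward(self, inp: Tuple[int]) -> Tuple[int]:
        _ = inp[0]  # "_" marks a value read only to exercise indexing
        return (1,)

print(SubmoduleForwardTupleInput().forward((7,)))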