The following issues were found:

caffe2/contrib/playground/AnyExp.py
138 issues
Attempted relative import beyond top-level package
Error

Line: 11 Column: 1

              from caffe2.python import workspace
from caffe2.python import timeout_guard
from caffe2.python import data_parallel_model
from . import checkpoint as checkpoint

from . import ModuleRegister as ModuleRegister
from . import module_map as module_map

# instantiate logger outside of distributed operators may trigger error

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 13 Column: 1

              from caffe2.python import data_parallel_model
from . import checkpoint as checkpoint

from . import ModuleRegister as ModuleRegister
from . import module_map as module_map

# instantiate logger outside of distributed operators may trigger error
# logger need to be created in each idividual operator instead.
import os

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 14 Column: 1

              from . import checkpoint as checkpoint

from . import ModuleRegister as ModuleRegister
from . import module_map as module_map

# instantiate logger outside of distributed operators may trigger error
# logger need to be created in each idividual operator instead.
import os
import inspect

            

Reported by Pylint.
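
A typical remedy for this family of findings is to spell the imports out absolutely, using the package path implied by the file location (caffe2/contrib/playground/); whether that path is importable depends on how caffe2 is installed, so treat this as a sketch rather than the project's intended fix:

from caffe2.contrib.playground import checkpoint
from caffe2.contrib.playground import ModuleRegister
from caffe2.contrib.playground import module_map

If the relative imports are in fact valid at runtime, the finding usually means Pylint was pointed at the file without its parent package on the analysis path.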

Instance of 'AnyExpTrainer' has no 'train_model' member
Error

Line: 212 Column: 19

              
    def checkpoint(self, epoch):
        self.model_path = checkpoint.save_model_params(
            True, self.train_model, self.gen_checkpoint_path(True, epoch + 1),
            epoch + 1, self.opts, float('-inf'))

    def gen_checkpoint_path(self, is_checkpoint, epoch):
        if (is_checkpoint):
            filename = "model_checkpoint_epoch{}.pkl".format(epoch)

            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'test_model' member
Error

Line: 276 Column: 12

              
    @abstractmethod
    def run_testing_net(self):
        if self.test_model is None:
            return
        timeout = 2000.0
        with timeout_guard.CompleteInTimeOrDie(timeout):
            workspace.RunNet(self.test_model.net.Proto().name)


            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'test_model' member
Error

Line: 280 Column: 30

                          return
        timeout = 2000.0
        with timeout_guard.CompleteInTimeOrDie(timeout):
            workspace.RunNet(self.test_model.net.Proto().name)

    # @abstractmethod
    def planning_output(self):
        self.init_metrics()
        self.init_plots()

            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'train_model' member
Error

Line: 289 Column: 41

                      self.init_logs()

    def prep_data_parallel_models(self):
        self.prep_a_data_parallel_model(self.train_model,
                                        self.train_dataset, True)
        self.prep_a_data_parallel_model(self.test_model,
                                        self.test_dataset, False)

    def prep_a_data_parallel_model(self, model, dataset, is_train):

            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'test_model' member
Error

Line: 291 Column: 41

                  def prep_data_parallel_models(self):
        self.prep_a_data_parallel_model(self.train_model,
                                        self.train_dataset, True)
        self.prep_a_data_parallel_model(self.test_model,
                                        self.test_dataset, False)

    def prep_a_data_parallel_model(self, model, dataset, is_train):
        if model is None:
            return

            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'train_model' member
Error

Line: 362 Column: 31

                              ))
                start_epoch, prev_checkpointed_lr, _best_metric = \
                    checkpoint.initialize_params_from_file(
                        model=self.train_model,
                        weights_file=previous_checkpoint,
                        num_xpus=num_xpus,
                        opts=opts,
                        broadcast_computed_param=True,
                        reset_epoch=False,

            

Reported by Pylint.

Instance of 'AnyExpTrainer' has no 'train_model' member
Error

Line: 373 Column: 27

                          log.info("Load pretrained model: {}".format(pretrained_model))
            start_epoch, prev_checkpointed_lr, best_metric = \
                checkpoint.initialize_params_from_file(
                    model=self.train_model,
                    weights_file=pretrained_model,
                    num_xpus=num_xpus,
                    opts=opts,
                    broadcast_computed_param=True,
                    reset_epoch=opts['model_param']['reset_epoch'],

            

Reported by Pylint.
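
All of the train_model / test_model findings in this file have the same shape: the attributes are evidently created dynamically, outside __init__, so static analysis cannot prove they exist. One hedged way to make them visible is to declare placeholders up front; the constructor below is illustrative, not the class's real signature:

class AnyExpTrainer:
    def __init__(self, opts):
        self.opts = opts
        # Declared here so linters and readers know these members exist;
        # the real models are attached later by the experiment setup code.
        self.train_model = None
        self.test_model = None

Alternatively, if the dynamic construction is intentional, the individual accesses can carry an inline # pylint: disable=no-member.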

torch/onnx/symbolic_caffe2.py
138 issues
Access to a protected member _symbolic_versions of a client class
Error

Line: 12 Column: 5

                  sym_registry.register_version("", version)
    # Register all quantized ops
    module = importlib.import_module("torch.onnx.symbolic_caffe2")
    sym_registry._symbolic_versions["caffe2"] = module
    quant_version_ops = getmembers(sym_registry._symbolic_versions["caffe2"])
    for op in quant_version_ops:
        if isfunction(op[1]) and not sym_registry.is_registered_op(op[0], domain, version):
            aten_q_ops = ["relu", "_empty_affine_quantized", "dequantize",
                          "quantize_per_tensor", "upsample_nearest2d", "avg_pool2d",

            

Reported by Pylint.

Access to a protected member _symbolic_versions of a client class
Error

Line: 13 Column: 36

                  # Register all quantized ops
    module = importlib.import_module("torch.onnx.symbolic_caffe2")
    sym_registry._symbolic_versions["caffe2"] = module
    quant_version_ops = getmembers(sym_registry._symbolic_versions["caffe2"])
    for op in quant_version_ops:
        if isfunction(op[1]) and not sym_registry.is_registered_op(op[0], domain, version):
            aten_q_ops = ["relu", "_empty_affine_quantized", "dequantize",
                          "quantize_per_tensor", "upsample_nearest2d", "avg_pool2d",
                          "reshape", "slice", "cat", "max_pool2d", "sigmoid"]

            

Reported by Pylint.
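
symbolic_caffe2.py and the symbolic registry live in the same project, so the protected access is presumably deliberate. The usual options are to add a public accessor on sym_registry or to acknowledge the access where it happens; a minimal sketch of the latter, using the line quoted above:

sym_registry._symbolic_versions["caffe2"] = module  # pylint: disable=protected-access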

Redefining built-in 'input'
Error

Line: 23 Column: 24

                              sym_registry.register_op(op[0], op[1], "", version)
            sym_registry.register_op(op[0], op[1], domain, version)

def _permute_helper(g, input, axes):
    quant_args = {
        "axes_i": axes,
        "Y_scale_f": input.node()["Y_scale"],
        "Y_zero_point_i": input.node()["Y_zero_point"],
    }

            

Reported by Pylint.
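
Shadowing the input built-in is harmless in these short helpers, but the conventional fix is simply to rename the parameter (for example to inp) in _permute_helper, nchw2nhwc, nhwc2nchw and linear. A self-contained illustration of the pattern, with names of my own choosing:

def scale(inp, factor):
    # Renamed from `input` so the built-in input() is no longer shadowed.
    return [x * factor for x in inp]

print(scale([1, 2, 3], 2))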

Access to a protected member _quantized_ops of a client class
Error

Line: 30 Column: 5

                      "Y_zero_point_i": input.node()["Y_zero_point"],
    }
    output = g.op("_caffe2::Int8Transpose", input, **quant_args)
    sym_help._quantized_ops.add(output)
    return output

def nchw2nhwc(g, input):
    axes = [0, 2, 3, 1]
    return _permute_helper(g, input, axes)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 33 Column: 18

                  sym_help._quantized_ops.add(output)
    return output

def nchw2nhwc(g, input):
    axes = [0, 2, 3, 1]
    return _permute_helper(g, input, axes)

def nhwc2nchw(g, input):
    axes = [0, 3, 1, 2]

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 37 Column: 18

                  axes = [0, 2, 3, 1]
    return _permute_helper(g, input, axes)

def nhwc2nchw(g, input):
    axes = [0, 3, 1, 2]
    return _permute_helper(g, input, axes)

def linear_prepack(g, weight, bias):
    # Mapping to a dummy caffe2 prepack node.

            

Reported by Pylint.

Access to a protected member _quantized_ops of a client class
Error

Line: 46 Column: 5

                  # During the onnx -> c2 conversion we can look up original weight and bias
    # from this node
    output = g.op("_caffe2::WeightPrepack", weight, bias)
    sym_help._quantized_ops.add(output)
    return output

@parse_args("v", "v", "v", "f", "i")
def linear(g, input, weight, bias, scale, zero_point):
    kwargs = {

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 50 Column: 15

                  return output

@parse_args("v", "v", "v", "f", "i")
def linear(g, input, weight, bias, scale, zero_point):
    kwargs = {
        "Y_scale_f": scale,
        "Y_zero_point_i": zero_point,
    }
    output = g.op("_caffe2::Int8FC", input, weight, bias, **kwargs)

            

Reported by Pylint.

Access to a protected member _quantized_ops of a client class
Error

Line: 56 Column: 5

                      "Y_zero_point_i": zero_point,
    }
    output = g.op("_caffe2::Int8FC", input, weight, bias, **kwargs)
    sym_help._quantized_ops.add(output)
    return output

def conv_prepack(g, input, weight, bias, stride, padding, dilation, groups):
    # Mapping to a dummy caffe2 prepack node.
    # During the onnx -> c2 conversion we can look up original weight and bias

            

Reported by Pylint.

Unused argument 'dilation'
Error

Line: 59 Column: 59

                  sym_help._quantized_ops.add(output)
    return output

def conv_prepack(g, input, weight, bias, stride, padding, dilation, groups):
    # Mapping to a dummy caffe2 prepack node.
    # During the onnx -> c2 conversion we can look up original weight and bias
    # from this node
    output = g.op("_caffe2::WeightPrepack", input, weight, bias)
    sym_help._quantized_ops.add(output)

            

Reported by Pylint.
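
If dilation is accepted only so conv_prepack matches its caller's signature, Pylint's default settings treat an argument whose name starts with an underscore as intentionally unused. A generic, self-contained sketch of that convention (the names are placeholders):

def prepack_stub(weight, bias, _dilation):
    # _dilation is accepted for signature compatibility and deliberately ignored.
    return (weight, bias)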

caffe2/python/onnx/tests/c2_ref_test.py
137 issues
Unable to import 'onnx'
Error

Line: 15 Column: 1

              from caffe2.python import core
from caffe2.proto import caffe2_pb2

import onnx
from onnx.helper import make_node, make_graph, make_tensor, make_tensor_value_info, make_model
from caffe2.python.onnx.helper import c2_native_run_net, c2_native_run_op

from onnx import mapping
import caffe2.python.onnx.frontend as c2_onnx

            

Reported by Pylint.

Unable to import 'onnx.helper'
Error

Line: 16 Column: 1

              from caffe2.proto import caffe2_pb2

import onnx
from onnx.helper import make_node, make_graph, make_tensor, make_tensor_value_info, make_model
from caffe2.python.onnx.helper import c2_native_run_net, c2_native_run_op

from onnx import mapping
import caffe2.python.onnx.frontend as c2_onnx
import caffe2.python.onnx.backend as c2

            

Reported by Pylint.

Unable to import 'onnx'
Error

Line: 19 Column: 1

              from onnx.helper import make_node, make_graph, make_tensor, make_tensor_value_info, make_model
from caffe2.python.onnx.helper import c2_native_run_net, c2_native_run_op

from onnx import mapping
import caffe2.python.onnx.frontend as c2_onnx
import caffe2.python.onnx.backend as c2

import numpy as np
from caffe2.python.models.download import ModelDownloader

            

Reported by Pylint.
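
Import-error findings like these usually describe the linting environment rather than the code: Pylint could not resolve onnx on the interpreter it was run with. Installing the package into that environment (pip install onnx) normally clears the whole group; if onnx is intentionally absent from the lint job, the import can be acknowledged inline:

import onnx  # pylint: disable=import-error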

Module 'caffe2.python._import_c_extension' has no 'DummyName' member
Error

Line: 33 Column: 13

              
class TestCaffe2Basic(TestCase):
    def test_dummy_name(self):
        g = C.DummyName()
        n1 = g.new_dummy_name()
        n2 = g.new_dummy_name()
        assert n1 != n2, "Got same names in different calls: {}".format(n1)

    def test_check_arguments(self):

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Caffe2Backend' member
Error

Line: 39 Column: 14

                      assert n1 != n2, "Got same names in different calls: {}".format(n1)

    def test_check_arguments(self):
        b2 = C.Caffe2Backend()

        node_def = make_node("Add", inputs=["X", "Y"], outputs=["Z"])
        b2.convert_node(node_def.SerializeToString())

        bad_node_def = make_node("Add", inputs=["X", "Y"], outputs=["Z"], foo=42, bar=56)

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Caffe2Backend' member
Error

Line: 461 Column: 19

                          transB=1,
            broadcast=1)

        backend = C.Caffe2Backend()

        # without broadcast and without shape info, gemm will be
        # converted to matmul + add
        _, op_strs = backend.convert_node(node_def.SerializeToString())
        op_names = []

            

Reported by Pylint.

Module 'caffe2.python._import_c_extension' has no 'Caffe2Backend' member
Error

Line: 664 Column: 17

                              dims=[1, 2, 3],
                vals=vals.flatten().tolist(),
            )
            b = C.Caffe2Backend()
            op = caffe2_pb2.OperatorDef()
            op.ParseFromString(b._build_tensor_filling_op(tensor.SerializeToString(), ''))
            self.assertEqual(len(op.input), 0)
            self.assertEqual(op.output, [tensor.name])
            ws, output = c2_native_run_op(op, inputs=[])

            

Reported by Pylint.
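
C in this test is the compiled caffe2 pybind extension, whose members Pylint cannot introspect statically, so these no-member findings are very likely false positives. A per-line acknowledgement looks like this, using the call quoted above:

b2 = C.Caffe2Backend()  # pylint: disable=no-member

Project-wide, the usual alternative is to list the extension module under Pylint's extension-pkg-allow-list (or the older extension-pkg-whitelist) so the binary module is actually loaded for inspection.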

Unused variable 'ws'
Error

Line: 125 Column: 9

                              axis=3,
            ),
        ])
        ws, c2_outputs = c2_native_run_net(
            init_net=None,
            predict_net=predict_net,
            inputs=[X, W, B])

        onnx_model = c2_onnx.caffe2_net_to_onnx_model(

            

Reported by Pylint.

Unused variable 'ws'
Error

Line: 214 Column: 9

                              keepdims=1,
            ),
        ])
        ws, c2_outputs = c2_native_run_net(
            init_net=None,
            predict_net=predict_net,
            inputs=[X])

        onnx_model = c2_onnx.caffe2_net_to_onnx_model(

            

Reported by Pylint.

Unused variable 'ws'
Error

Line: 245 Column: 9

                              height_scale=height_scale,
            ),
        ])
        ws, c2_outputs = c2_native_run_net(
            init_net=None,
            predict_net=predict_net,
            inputs=[X])

        onnx_model = c2_onnx.caffe2_net_to_onnx_model(

            

Reported by Pylint.
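
When the returned workspace is never inspected, the conventional fix is to bind it to an underscore-prefixed name, which Pylint's default dummy-variable pattern accepts. A self-contained illustration (run_net is a stand-in for c2_native_run_net, not the real helper):

def run_net():
    # Stand-in returning (workspace, outputs), like c2_native_run_net.
    return object(), [1, 2, 3]

_ws, outputs = run_net()  # leading underscore marks the workspace as intentionally unused
print(outputs)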

test/onnx/test_onnx_opset.py
136 issues
No name 'TestCase' in module 'test_pytorch_common'
Error

Line: 1 Column: 1

              from test_pytorch_common import TestCase, run_tests

import torch
import torch.onnx
from torch.nn import Module

import onnx

import io

            

Reported by Pylint.

No name 'run_tests' in module 'test_pytorch_common'
Error

Line: 1 Column: 1

              from test_pytorch_common import TestCase, run_tests

import torch
import torch.onnx
from torch.nn import Module

import onnx

import io

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 3 Column: 1

              from test_pytorch_common import TestCase, run_tests

import torch
import torch.onnx
from torch.nn import Module

import onnx

import io

            

Reported by Pylint.

Unable to import 'torch.onnx'
Error

Line: 4 Column: 1

              from test_pytorch_common import TestCase, run_tests

import torch
import torch.onnx
from torch.nn import Module

import onnx

import io

            

Reported by Pylint.

Unable to import 'torch.nn'
Error

Line: 5 Column: 1

              
import torch
import torch.onnx
from torch.nn import Module

import onnx

import io


            

Reported by Pylint.

Unable to import 'onnx'
Error

Line: 7 Column: 1

              import torch.onnx
from torch.nn import Module

import onnx

import io

from torch.onnx.symbolic_helper import _export_onnx_opset_version
from torch.onnx import ir_version, producer_name, producer_version

            

Reported by Pylint.

Unable to import 'torch.onnx.symbolic_helper'
Error

Line: 11 Column: 1

              
import io

from torch.onnx.symbolic_helper import _export_onnx_opset_version
from torch.onnx import ir_version, producer_name, producer_version


def check_onnx_opset_operator(model, ops, opset_version=_export_onnx_opset_version):
    # check_onnx_components

            

Reported by Pylint.

Unable to import 'torch.onnx'
Error

Line: 12 Column: 1

              import io

from torch.onnx.symbolic_helper import _export_onnx_opset_version
from torch.onnx import ir_version, producer_name, producer_version


def check_onnx_opset_operator(model, ops, opset_version=_export_onnx_opset_version):
    # check_onnx_components
    assert model.ir_version == ir_version and \

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 84 Column: 31

                      # test with dynamic k
        class MyModuleDynamic(torch.jit.ScriptModule):
            @torch.jit.script_method
            def forward(self, input, k):
                return torch.topk(input, k)

        ops_10 = [{"op_name": "Constant", "attributes": [{"name": "value", "type": 4}]},
                  {"op_name": "Reshape"},
                  {"op_name": "TopK", "attributes": [{"name": "axis", "i": -1, "type": 2}]}]

            

Reported by Pylint.

Useless super delegation in method '__init__'
Error

Line: 133 Column: 13

              
    def test_upsample(self):
        class MyModule(Module):
            def __init__(self):
                super(MyModule, self).__init__()

            def forward(self, x):
                size = [v * 2 for v in x.size()[2:]]
                size = [int(i) for i in size]

            

Reported by Pylint.
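
An __init__ that only forwards its arguments to super().__init__() adds nothing; dropping it lets Python fall through to the parent constructor. A generic, self-contained before/after illustration:

class Base:
    def __init__(self):
        self.ready = True

class Child(Base):
    # Before: an __init__ that only called super().__init__().
    # After: omit it entirely and inherit Base.__init__.
    pass

assert Child().ready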

test/package/test_package_script.py
135 issues
Unable to import 'torch'
Error

Line: 5 Column: 1

              from textwrap import dedent
from unittest import skipIf

import torch
from torch.package import PackageExporter, PackageImporter
from torch.testing._internal.common_utils import (
    IS_FBCODE,
    IS_SANDCASTLE,
    run_tests,

            

Reported by Pylint.

Unable to import 'torch.package'
Error

Line: 6 Column: 1

              from unittest import skipIf

import torch
from torch.package import PackageExporter, PackageImporter
from torch.testing._internal.common_utils import (
    IS_FBCODE,
    IS_SANDCASTLE,
    run_tests,
)

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_utils'
Error

Line: 7 Column: 1

              
import torch
from torch.package import PackageExporter, PackageImporter
from torch.testing._internal.common_utils import (
    IS_FBCODE,
    IS_SANDCASTLE,
    run_tests,
)


            

Reported by Pylint.

Unable to import 'package_a.fake_interface'
Error

Line: 34 Column: 9

                  def test_package_interface(self):
        """Packaging an interface class should work correctly."""

        import package_a.fake_interface as fake

        uses_interface = fake.UsesInterface()
        scripted = torch.jit.script(uses_interface)
        scripted.proxy_mod = torch.jit.script(fake.NewModule())


            

Reported by Pylint.

Unable to import 'package_a.fake_interface'
Error

Line: 62 Column: 9

                      sure TorchScript can distinguish between the two.
        """
        # Import one version of the interface
        import package_a.fake_interface as fake

        # Simulate a package that contains a different version of the
        # interface, with the exact same name.
        buffer = BytesIO()
        with PackageExporter(buffer) as pe:

            

Reported by Pylint.

Unable to import 'package_a.fake_script_class'
Error

Line: 104 Column: 9

                      torch.jit.script(diff_fake.UsesInterface())

    def test_package_script_class(self):
        import package_a.fake_script_class as fake

        buffer = BytesIO()
        with PackageExporter(buffer) as pe:
            pe.save_module(fake.__name__)
        buffer.seek(0)

            

Reported by Pylint.

Unable to import 'package_a.fake_script_class'
Error

Line: 126 Column: 9

                      different than the one defined in the loading environment, to make
        sure TorchScript can distinguish between the two.
        """
        import package_a.fake_script_class as fake

        # Simulate a package that contains a different version of the
        # script class ,with the attribute `bar` instead of `foo`
        buffer = BytesIO()
        with PackageExporter(buffer) as pe2:

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 158 Column: 9

                      """
        Test basic saving of ScriptModule.
        """
        from package_a.test_module import ModWithTensor

        scripted_mod = torch.jit.script(ModWithTensor(torch.rand(1, 2, 3)))

        buffer = BytesIO()
        with PackageExporter(buffer) as e:

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 180 Column: 9

                      """
        Test basic saving of ScriptModule in file.
        """
        from package_a.test_module import ModWithTensor

        scripted_mod = torch.jit.script(ModWithTensor(torch.rand(1, 2, 3)))

        filename = self.temp()
        with PackageExporter(filename) as e:

            

Reported by Pylint.

Unable to import 'package_a.test_module'
Error

Line: 197 Column: 9

                      """
        Test basic saving of ScriptModule with submodule.
        """
        from package_a.test_module import ModWithSubmod, ModWithTensor

        scripted_mod = torch.jit.script(
            ModWithSubmod(ModWithTensor(torch.rand(1, 2, 3)))
        )


            

Reported by Pylint.
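
package_a appears to be a fixture package that the test harness puts on sys.path at runtime, which static analysis cannot see. If that is the case, the imports can either be acknowledged inline, as sketched below, or the fixture directory can be added to the paths Pylint searches:

import package_a.fake_interface as fake  # pylint: disable=import-error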

test/quantization/fx/test_equalize_fx.py
133 issues
Unable to import 'torch'
Error

Line: 1 Column: 1

              import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (

            

Reported by Pylint.

Unable to import 'torch.nn'
Error

Line: 2 Column: 1

              import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (

            

Reported by Pylint.

Unable to import 'torch.nn.functional'
Error

Line: 3 Column: 1

              import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (

            

Reported by Pylint.

Unable to import 'torch.nn.intrinsic.quantized'
Error

Line: 4 Column: 1

              import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (

            

Reported by Pylint.

Unable to import 'torch.nn.quantized'
Error

Line: 5 Column: 1

              import torch.nn as nn
import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (
    _InputEqualizationObserver,

            

Reported by Pylint.

Unable to import 'torch.quantization'
Error

Line: 6 Column: 1

              import torch.nn.functional as F
import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (
    _InputEqualizationObserver,
    _WeightEqualizationObserver,

            

Reported by Pylint.

Unable to import 'torch.quantization.observer'
Error

Line: 7 Column: 1

              import torch.nn.intrinsic.quantized as nniq
import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (
    _InputEqualizationObserver,
    _WeightEqualizationObserver,
    calculate_equalization_scale,

            

Reported by Pylint.

Unable to import 'torch.quantization.quantize_fx'
Error

Line: 8 Column: 1

              import torch.nn.quantized as nnq
from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (
    _InputEqualizationObserver,
    _WeightEqualizationObserver,
    calculate_equalization_scale,
    default_equalization_qconfig,

            

Reported by Pylint.

Unable to import 'torch.quantization.fx._equalize'
Error

Line: 9 Column: 1

              from torch.quantization import default_qconfig
from torch.quantization.observer import MinMaxObserver, PerChannelMinMaxObserver
from torch.quantization.quantize_fx import prepare_fx, convert_fx
from torch.quantization.fx._equalize import (
    _InputEqualizationObserver,
    _WeightEqualizationObserver,
    calculate_equalization_scale,
    default_equalization_qconfig,
    _convert_equalization_ref,

            

Reported by Pylint.

Unable to import 'torch.testing._internal.common_quantization'
Error

Line: 19 Column: 1

                  get_equalization_qconfig_dict,
)

from torch.testing._internal.common_quantization import (
    NodeSpec as ns,
    QuantizationTestCase,
    SingleLayerLinearModel,
    TwoLayerLinearModel,
    LinearAddModel,

            

Reported by Pylint.

setup.py
132 issues
Unused argument 'args'
Error

Line: 272 Column: 1

                  def report(*args):
        print(*args)
else:
    def report(*args):
        pass

    # Make distutils respect --quiet too
    setuptools.distutils.log.warn = report


            

Reported by Pylint.
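
The second report stub deliberately swallows its arguments to implement quiet mode, so the cleanest signal is Pylint's underscore convention for intentionally unused arguments; a minimal sketch:

def report(*_args):
    # Quiet mode: deliberately discard all messages.
    pass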

TODO: Fix for python < 3.3
Error

Line: 290 Column: 3

                      sysconfig.get_config_var("prefix"),
        sysconfig.get_config_var("VERSION"))
    # Fix virtualenv builds
    # TODO: Fix for python < 3.3
    if not os.path.exists(cmake_python_library):
        cmake_python_library = "{}/libs/python{}.lib".format(
            sys.base_prefix,
            sysconfig.get_config_var("VERSION"))
else:

            

Reported by Pylint.

Redefining name 'f' from outer scope (line 901)
Error

Line: 320 Column: 36

                                          ]]
    if not os.path.exists(git_modules_path):
        return default_modules_path
    with open(git_modules_path) as f:
        return [os.path.join(cwd, line.split("=", 1)[1].strip()) for line in
                f.readlines() if line.strip().startswith("path")]


def check_submodules():

            

Reported by Pylint.
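
Renaming the short-lived file handle avoids the clash with the module-level f defined at line 901. A self-contained sketch of the same logic with placeholder names (read_gitmodules_paths is mine, not setup.py's):

import os

def read_gitmodules_paths(gitmodules_file, cwd):
    with open(gitmodules_file) as handle:
        return [os.path.join(cwd, line.split("=", 1)[1].strip())
                for line in handle if line.strip().startswith("path")]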

os.getenv default type is builtins.bool. Expected str or None.
Error

Line: 335 Column: 13

                  def not_exists_or_empty(folder):
        return not os.path.exists(folder) or (os.path.isdir(folder) and len(os.listdir(folder)) == 0)

    if bool(os.getenv("USE_SYSTEM_LIBS", False)):
        return
    folders = get_submodule_folders()
    # If none of the submodule folders exists, try to initialize them
    if all(not_exists_or_empty(folder) for folder in folders):
        try:

            

Reported by Pylint.
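
os.getenv expects a str (or None) default, and passing False only works by accident of truthiness. Supplying an empty-string default keeps the existing behaviour while satisfying the checker; a sketch:

import os

# An empty string is falsy, so bool(...) behaves exactly as before.
use_system_libs = bool(os.getenv("USE_SYSTEM_LIBS", ""))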

Catching too general exception Exception
Error

Line: 346 Column: 16

                          subprocess.check_call(["git", "submodule", "update", "--init", "--recursive"], cwd=cwd)
            end = time.time()
            print(' --- Submodule initialization took {:.2f} sec'.format(end - start))
        except Exception:
            print(' --- Submodule initalization failed')
            print('Please run:\n\tgit submodule update --init --recursive --jobs 0')
            sys.exit(1)
    for folder in folders:
        check_for_files(folder, ["CMakeLists.txt", "Makefile", "setup.py", "LICENSE", "LICENSE.md", "LICENSE.txt"])

            

Reported by Pylint.
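
The guarded call is subprocess.check_call, so the specific exceptions are subprocess.CalledProcessError (non-zero exit) and OSError (git itself missing); a hedged sketch of the narrower handler, omitting the timing and cwd details quoted above:

import subprocess
import sys

try:
    subprocess.check_call(["git", "submodule", "update", "--init", "--recursive"])
except (subprocess.CalledProcessError, OSError):
    print(' --- Submodule initialization failed')
    print('Please run:\n\tgit submodule update --init --recursive --jobs 0')
    sys.exit(1)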

Consider explicitly re-raising using the 'from' keyword
Error

Line: 420 Column: 9

                  try:
        importlib.import_module(importname)
    except ImportError:
        raise RuntimeError(missing_pydep.format(importname=importname, module=module))


class build_ext(setuptools.command.build_ext.build_ext):

    # Copy libiomp5.dylib inside the wheel package on OS X

            

Reported by Pylint.
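
Chaining with `from` preserves the original ImportError's traceback, which is what the finding asks for. A self-contained sketch; the function name and message template are placeholders, not setup.py's actual ones:

import importlib

def require(importname, module, missing_pydep="{module} requires '{importname}'; please install it"):
    try:
        importlib.import_module(importname)
    except ImportError as e:
        raise RuntimeError(missing_pydep.format(importname=importname, module=module)) from e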

Redefining name 'i' from outer scope (line 246)
Error

Line: 555 Column: 9

                          'caffe2.python.caffe2_pybind11_state_gpu',
            'caffe2.python.caffe2_pybind11_state_hip',
        ]
        i = 0
        while i < len(self.extensions):
            ext = self.extensions[i]
            if ext.name not in caffe2_pybind_exts:
                i += 1
                continue

            

Reported by Pylint.

Redefining name 'f' from outer scope (line 901)
Error

Line: 589 Column: 36

              
    def create_compile_commands(self):
        def load(filename):
            with open(filename) as f:
                return json.load(f)
        ninja_files = glob.glob('build/*compile_commands.json')
        cmake_files = glob.glob('torch/lib/build/*/compile_commands.json')
        all_commands = [entry
                        for f in ninja_files + cmake_files

            

Reported by Pylint.

Redefining name 'f' from outer scope (line 901)
Error

Line: 608 Column: 56

                      new_contents = json.dumps(all_commands, indent=2)
        contents = ''
        if os.path.exists('compile_commands.json'):
            with open('compile_commands.json', 'r') as f:
                contents = f.read()
        if contents != new_contents:
            with open('compile_commands.json', 'w') as f:
                f.write(new_contents)


            

Reported by Pylint.

Attribute 'bsd_text' defined outside __init__
Error

Line: 629 Column: 13

                  def __enter__(self):
        """Concatenate files"""
        with open(self.f1, 'r') as f1:
            self.bsd_text = f1.read()

        with open(self.f1, 'a') as f1:
            with open(self.f2, 'r') as f2:
                self.bundled_text = f2.read()
                f1.write('\n\n')

            

Reported by Pylint.
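
Declaring the attributes in __init__, even as None, keeps the object's shape visible to readers and to static analysis. A hedged skeleton of the license-concatenation context manager; the class name and the restore logic are placeholders, and only the __init__ declarations are the point:

class ConcatLicenseFiles:
    def __init__(self, f1, f2):
        self.f1 = f1
        self.f2 = f2
        self.bsd_text = None      # filled in by __enter__
        self.bundled_text = None  # filled in by __enter__

    def __enter__(self):
        with open(self.f1) as fin:
            self.bsd_text = fin.read()
        return self

    def __exit__(self, exc_type, exc, tb):
        # Restore the saved text of the first file on exit.
        with open(self.f1, 'w') as fout:
            fout.write(self.bsd_text)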

test/distributions/test_transforms.py
131 issues
Unable to import 'pytest'
Error

Line: 3 Column: 1

              from numbers import Number

import pytest

import torch
from torch.autograd.functional import jacobian
from torch.distributions import Dirichlet, Independent, Normal, TransformedDistribution, constraints
from torch.distributions.transforms import (AbsTransform, AffineTransform, ComposeTransform,
                                            CorrCholeskyTransform, ExpTransform, IndependentTransform,

            

Reported by Pylint.

Unable to import 'torch'
Error

Line: 5 Column: 1

              
import pytest

import torch
from torch.autograd.functional import jacobian
from torch.distributions import Dirichlet, Independent, Normal, TransformedDistribution, constraints
from torch.distributions.transforms import (AbsTransform, AffineTransform, ComposeTransform,
                                            CorrCholeskyTransform, ExpTransform, IndependentTransform,
                                            LowerCholeskyTransform, PowerTransform, ReshapeTransform,

            

Reported by Pylint.

Unable to import 'torch.autograd.functional'
Error

Line: 6 Column: 1

              import pytest

import torch
from torch.autograd.functional import jacobian
from torch.distributions import Dirichlet, Independent, Normal, TransformedDistribution, constraints
from torch.distributions.transforms import (AbsTransform, AffineTransform, ComposeTransform,
                                            CorrCholeskyTransform, ExpTransform, IndependentTransform,
                                            LowerCholeskyTransform, PowerTransform, ReshapeTransform,
                                            SigmoidTransform, TanhTransform, SoftmaxTransform,

            

Reported by Pylint.

Unable to import 'torch.distributions'
Error

Line: 7 Column: 1

              
import torch
from torch.autograd.functional import jacobian
from torch.distributions import Dirichlet, Independent, Normal, TransformedDistribution, constraints
from torch.distributions.transforms import (AbsTransform, AffineTransform, ComposeTransform,
                                            CorrCholeskyTransform, ExpTransform, IndependentTransform,
                                            LowerCholeskyTransform, PowerTransform, ReshapeTransform,
                                            SigmoidTransform, TanhTransform, SoftmaxTransform,
                                            StickBreakingTransform, identity_transform, Transform,

            

Reported by Pylint.

Unable to import 'torch.distributions.transforms'
Error

Line: 8 Column: 1

              import torch
from torch.autograd.functional import jacobian
from torch.distributions import Dirichlet, Independent, Normal, TransformedDistribution, constraints
from torch.distributions.transforms import (AbsTransform, AffineTransform, ComposeTransform,
                                            CorrCholeskyTransform, ExpTransform, IndependentTransform,
                                            LowerCholeskyTransform, PowerTransform, ReshapeTransform,
                                            SigmoidTransform, TanhTransform, SoftmaxTransform,
                                            StickBreakingTransform, identity_transform, Transform,
                                            _InverseTransform)

            

Reported by Pylint.

Unable to import 'torch.distributions.utils'
Error

Line: 14 Column: 1

                                                          SigmoidTransform, TanhTransform, SoftmaxTransform,
                                            StickBreakingTransform, identity_transform, Transform,
                                            _InverseTransform)
from torch.distributions.utils import tril_matrix_to_vec, vec_to_tril_matrix


def get_transforms(cache_size):
    transforms = [
        AbsTransform(cache_size=cache_size),

            

Reported by Pylint.

Access to a protected member _cache_size of a client class
Error

Line: 79 Column: 107

                      if isinstance(transform.loc, Number):
            return transform
        try:
            return AffineTransform(transform.loc.expand(shape), transform.scale.expand(shape), cache_size=transform._cache_size)
        except RuntimeError:
            return AffineTransform(transform.loc.reshape(shape), transform.scale.reshape(shape), cache_size=transform._cache_size)
    if isinstance(transform, ComposeTransform):
        reshaped_parts = []
        for p in transform.parts:

            

Reported by Pylint.

Access to a protected member _cache_size of a client class
Error

Line: 81 Column: 109

                      try:
            return AffineTransform(transform.loc.expand(shape), transform.scale.expand(shape), cache_size=transform._cache_size)
        except RuntimeError:
            return AffineTransform(transform.loc.reshape(shape), transform.scale.reshape(shape), cache_size=transform._cache_size)
    if isinstance(transform, ComposeTransform):
        reshaped_parts = []
        for p in transform.parts:
            reshaped_parts.append(reshape_transform(p, shape))
        return ComposeTransform(reshaped_parts, cache_size=transform._cache_size)

            

Reported by Pylint.

Access to a protected member _cache_size of a client class
Error

Line: 86 Column: 60

                      reshaped_parts = []
        for p in transform.parts:
            reshaped_parts.append(reshape_transform(p, shape))
        return ComposeTransform(reshaped_parts, cache_size=transform._cache_size)
    if isinstance(transform.inv, AffineTransform):
        return reshape_transform(transform.inv, shape).inv
    if isinstance(transform.inv, ComposeTransform):
        return reshape_transform(transform.inv, shape).inv
    return transform

            

Reported by Pylint.

Access to a protected member _inv of a client class
Error

Line: 97 Column: 24

              # Generate pytest ids
def transform_id(x):
    assert isinstance(x, Transform)
    name = f'Inv({type(x._inv).__name__})' if isinstance(x, _InverseTransform) else f'{type(x).__name__}'
    return f'{name}(cache_size={x._cache_size})'


def generate_data(transform):
    torch.manual_seed(1)

            

Reported by Pylint.

caffe2/python/schema.py
131 issues
Access to a protected member _child_base_id of a client class
Error

Line: 180 Column: 20

                      p, i = self._parent
        pos = 0 if child_index is None else self._field_offsets[child_index]
        if p:
            pos += p._child_base_id(i)
        return pos

    def __eq__(self, other):
        """Equivalance of two schemas"""
        return (

            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 254 Column: 9

                  def _pprint_impl(self, indent, str_buffer):
        str_buffer.write('  ' * indent + "List(\n")
        str_buffer.write('  ' * (indent + 1) + "lengths=\n")
        self.lengths._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_items=\n")
        self._items._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ")\n")

    def __getattr__(self, item):

            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 256 Column: 9

                      str_buffer.write('  ' * (indent + 1) + "lengths=\n")
        self.lengths._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_items=\n")
        self._items._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ")\n")

    def __getattr__(self, item):
        """If the value of this list is a struct,
        allow to introspect directly into its fields."""

            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 332 Column: 9

                  def _pprint_impl(self, indent, str_buffer):
        str_buffer.write('  ' * indent + "ListWithEvicted(\n")
        str_buffer.write('  ' * (indent + 1) + "lengths=\n")
        self.lengths._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_items=\n")
        self._items._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_evicted_values=\n")
        self._evicted_values._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ")\n")

            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 334 Column: 9

                      str_buffer.write('  ' * (indent + 1) + "lengths=\n")
        self.lengths._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_items=\n")
        self._items._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_evicted_values=\n")
        self._evicted_values._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ")\n")



            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 336 Column: 9

                      str_buffer.write('  ' * (indent + 1) + "_items=\n")
        self._items._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * (indent + 1) + "_evicted_values=\n")
        self._evicted_values._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ")\n")


    def __getattr__(self, item):
        """If the value of this list is a struct,

            

Reported by Pylint.

Redefining built-in 'id'
Error

Line: 420 Column: 13

                          ):
                raise ValueError('Duplicate field name: %s' % name)
            self.fields[name] = self.fields[name] + field
        for id, (_, field) in enumerate(viewitems(self.fields)):
            field._set_parent(self, id)
        super(Struct, self).__init__(viewvalues(self.fields))
        self._frozen = True

    def _struct_from_nested_name(self, nested_name, field):

            

Reported by Pylint.

Access to a protected member _pprint_impl of a client class
Error

Line: 500 Column: 13

                      str_buffer.write('  ' * indent + "Struct( \n")
        for name, field in viewitems(self.fields):
            str_buffer.write('  ' * (indent + 1) + "{}=".format(name) + "\n")
            field._pprint_impl(indent=indent + 2, str_buffer=str_buffer)
        str_buffer.write('  ' * indent + ") \n")

    def __contains__(self, item):
        field = self._get_field_by_nested_name(item)
        return field is not None

            

Reported by Pylint.

Consider explicitly re-raising using the 'from' keyword
Error

Line: 550 Column: 13

                      try:
            return super(Struct, self).__getattribute__("fields")[item]
        except KeyError:
            raise AttributeError(item)

    def __setattr__(self, key, value):
        # Disable setting attributes after initialization to prevent false
        # impression of being able to overwrite a field.
        # Allowing setting internal states mainly so that _parent can be set

            

Reported by Pylint.

TODO(azzolini): figure out better way of representing this
Error

Line: 857 Column: 3

                                  'Invalid blob type: %s' % str(type(blob)))

            # reshape scalars into 1D arrays
            # TODO(azzolini): figure out better way of representing this
            if len(blob.shape) == 0 and not preserve_shape:
                blob = blob.reshape((1, ))

            # infer inner shape from the blob given
            # TODO(dzhulgakov): tweak this to make it work with PackedStruct

            

Reported by Pylint.

torch/distributed/algorithms/ddp_comm_hooks/powerSGD_hook.py
130 issues
Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              import torch
import torch.distributed as dist

from . import default_hooks as default


def _orthogonalize(matrix, epsilon=0):
    """
    Applies Gram-Schmidt procedure to orthogonalize a given 2D tensor.

            

Reported by Pylint.
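
Unlike the AnyExp.py case above, `from . import default_hooks as default` is a normal intra-package import for a file living in torch/distributed/algorithms/ddp_comm_hooks/, so this is most likely an analysis-path artifact rather than a code bug. The absolute spelling, if a change is wanted at all, would be:

from torch.distributed.algorithms.ddp_comm_hooks import default_hooks as default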

Module 'torch' has no 'sum' member
Error

Line: 42 Column: 21

                      # Project it on the rest and remove it.
        if i + 1 < num_cols:
            rest = matrix[:, i + 1 :]
            rest -= torch.sum(col * rest, dim=0) * col


def _should_compress(
    num_rows, num_cols, matrix_approximation_rank, min_compression_rate
):

            

Reported by Pylint.

Module 'numpy.random' has no 'RandomState' member
Error

Line: 211 Column: 20

                      # Different random seeds across iterations indicate different 'projections' of the gradients at different SGD steps.
        # If the same random projection is used,
        # there will be differences between the gradients that are never synchronized.
        self.rng = np.random.RandomState(random_seed)
        # Since there is only a single state instance for all the input buckets,
        # need to maintain a dictionary that maps each bucket index to the local error.
        self.error_dict = {}
        self.p_memory_dict = {}
        self.q_memory_dict = {}

            

Reported by Pylint.

Module 'torch' has no 'zeros' member
Error

Line: 349 Column: 46

                                  total_length
                )
            )
            state.error_dict[bucket_index] = torch.zeros(
                total_length, device=device, dtype=dtype
            )

        # Keep a copy of the input tensor,
        # so that we can compute the local error caused by compression later,

            

Reported by Pylint.

Module 'torch' has no 'clone' member
Error

Line: 356 Column: 27

                      # Keep a copy of the input tensor,
        # so that we can compute the local error caused by compression later,
        # by comparing this copy and the input tensor updated after decompression.
        input_tensor_cp = torch.clone(input_tensor).detach()

    # Unflatten the input tensor into per-parameter tensors, for layer-wise compression.
    tensors = bucket.gradients()

    # Step I: Divide all the tensors into two groups,

            

Reported by Pylint.

Module 'torch' has no 'cat' member
Error

Line: 388 Column: 9

                  # Step II: Handle uncompressed tensors.
    # Allocate contiguous memory for these tensors to allreduce efficiently.
    uncompressed_tensors_memory = (
        torch.cat([tensor.view(-1) for tensor in uncompressed_tensors])
        if uncompressed_tensors
        else torch.tensor([], device=device, dtype=dtype)
    )

    # Step III: Handle the tensors that should be compressed.

            

Reported by Pylint.

Module 'torch' has no 'tensor' member; maybe 'Tensor'?
Error

Line: 390 Column: 14

                  uncompressed_tensors_memory = (
        torch.cat([tensor.view(-1) for tensor in uncompressed_tensors])
        if uncompressed_tensors
        else torch.tensor([], device=device, dtype=dtype)
    )

    # Step III: Handle the tensors that should be compressed.
    # Allocate contiguous memory for Ps and Qs to allreduce efficiently.
    # If warm-start is enabled, reuse Ps and Qs from the previous iteration if possible.

            

Reported by Pylint.

Module 'torch' has no 'empty' member
Error

Line: 408 Column: 45

                                  total_Ps_size, total_Qs_size
                )
            )
        state.p_memory_dict[bucket_index] = torch.empty(
            total_Ps_size, device=device, dtype=dtype
        )
        state.q_memory_dict[bucket_index] = torch.empty(
            total_Qs_size, device=device, dtype=dtype
        )

            

Reported by Pylint.

Module 'torch' has no 'empty' member
Error

Line: 411 Column: 45

                      state.p_memory_dict[bucket_index] = torch.empty(
            total_Ps_size, device=device, dtype=dtype
        )
        state.q_memory_dict[bucket_index] = torch.empty(
            total_Qs_size, device=device, dtype=dtype
        )

    # Create Ps and Qs that point to the allocated memory.
    ps = []

            

Reported by Pylint.

Module 'torch' has no 'randn' member
Error

Line: 451 Column: 21

                          torch.manual_seed(state.rng.randint(1_000_000_000))
            for q in qs:
                q.copy_(
                    torch.randn(
                        *q.shape,
                        device="cpu",
                        dtype=dtype,
                    )
                )

            

Reported by Pylint.
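
torch.sum, torch.zeros, torch.clone, torch.cat, torch.tensor, torch.empty and torch.randn are all real top-level torch functions; Pylint flags them because much of torch's surface comes from a C extension it does not introspect. A small runnable check mirroring the quoted update, plus the per-line acknowledgement (assumes torch is installed):

import torch

rest = torch.randn(4, 3)
col = torch.randn(4, 1)
rest -= torch.sum(col * rest, dim=0) * col  # pylint: disable=no-member

The project-wide fix is usually configuration rather than code, for example adding torch to Pylint's generated-members or extension-pkg-allow-list settings.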