The following issues were found:

torch/fx/experimental/partitioner_utils.py
35 issues
Lambda may not be necessary
Error

Line: 31 Column: 28

    def add_node(self, node):
        input_nodes: Dict[Node, None] = {}
        map_arg(node.args, lambda n: input_nodes.setdefault(n))
        map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
        # Add current node's input nodes if they are placeholder or constants
        for n in input_nodes:
            if n.op in {"placeholder", "get_attr"}:
                self.nodes.add(n)

Reported by Pylint.
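
One way to address W0108 (a sketch, not the project's actual patch): `dict.setdefault` is already a callable of one argument, so the lambda wrapper can be dropped and the bound method passed directly:

    map_arg(node.args, input_nodes.setdefault)
    map_arg(node.kwargs, input_nodes.setdefault)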

Lambda may not be necessary
Error

Line: 32 Column: 30

    def add_node(self, node):
        input_nodes: Dict[Node, None] = {}
        map_arg(node.args, lambda n: input_nodes.setdefault(n))
        map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
        # Add current node's input nodes if they are placeholder or constants
        for n in input_nodes:
            if n.op in {"placeholder", "get_attr"}:
                self.nodes.add(n)
        self.nodes.add(node)

Reported by Pylint.

Lambda may not be necessary
Error

Line: 46 Column: 32

            self.nodes.remove(node)
            # Collect the node's input nodes
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # Check if an input node is a placeholder or get_attr,
            # and this input node is not used by some other nodes in this partition,
            # the remove this input node
            for input_node in input_nodes:

Reported by Pylint.

Lambda may not be necessary
Error

Line: 47 Column: 34

            # Collect the node's input nodes
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # Check if an input node is a placeholder or get_attr,
            # and this input node is not used by some other nodes in this partition,
            # the remove this input node
            for input_node in input_nodes:
                if all(

Reported by Pylint.

Lambda may not be necessary
Error

Line: 107 Column: 24

                  """
    # Find all its input nodes
    input_nodes: Dict[Node, None] = {}
    map_arg(node.args, lambda n: input_nodes.setdefault(n))
    map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
    # Calculate total size of related nodes
    total_size_of_input_nodes = 0
    for n in input_nodes:
        # Make sure this node hasn't been in this set yet

Reported by Pylint.

Lambda may not be necessary
Error

Line: 108 Column: 26

    # Find all its input nodes
    input_nodes: Dict[Node, None] = {}
    map_arg(node.args, lambda n: input_nodes.setdefault(n))
    map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
    # Calculate total size of related nodes
    total_size_of_input_nodes = 0
    for n in input_nodes:
        # Make sure this node hasn't been in this set yet
        if n not in nodes:

Reported by Pylint.

Cell variable input_nodes defined in loop
Error

Line: 141 Column: 42

            if node.op in {"placeholder", "get_attr"}:
                continue
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # If a node has no input nodes in this partition,
            # or its input nodes in this partition are placeholders and get_attrs
            # this node is on the top bfs level in this partition
            if not any(

Reported by Pylint.
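
Both findings on this line point at the same lambda: it is defined inside a loop and closes over `input_nodes`. Because `map_arg` invokes the lambda immediately, the late-binding hazard W0640 warns about cannot actually bite here, but both messages can be silenced at once by binding the value explicitly or by passing the bound method:

    # Either bind the current dict as a default argument...
    map_arg(node.args, lambda n, d=input_nodes: d.setdefault(n))
    # ...or avoid the lambda entirely:
    map_arg(node.args, input_nodes.setdefault)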

Lambda may not be necessary
Error

Line: 141 Column: 32

            if node.op in {"placeholder", "get_attr"}:
                continue
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # If a node has no input nodes in this partition,
            # or its input nodes in this partition are placeholders and get_attrs
            # this node is on the top bfs level in this partition
            if not any(

Reported by Pylint.

Lambda may not be necessary
Error

Line: 142 Column: 34

                continue
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # If a node has no input nodes in this partition,
            # or its input nodes in this partition are placeholders and get_attrs
            # this node is on the top bfs level in this partition
            if not any(
                [

Reported by Pylint.

Cell variable input_nodes defined in loop
Error

Line: 142 Column: 44

                continue
            input_nodes: Dict[Node, None] = {}
            map_arg(node.args, lambda n: input_nodes.setdefault(n))
            map_arg(node.kwargs, lambda n: input_nodes.setdefault(n))
            # If a node has no input nodes in this partition,
            # or its input nodes in this partition are placeholders and get_attrs
            # this node is on the top bfs level in this partition
            if not any(
                [

Reported by Pylint.

aten/src/ATen/gen_vulkan_glsl.py
34 issues
Starting a process with a shell, possible injection detected, security issue.
Security injection

Line: 14
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b605_start_process_with_a_shell.html

def findAllGlsls(path):
    cmd = "find " + path + " -name \"*.glsl\""
    vexs = os.popen(cmd).read().split('\n')
    output = []
    for f in vexs:
        if len(f) > 1:
            output.append(f)
    output.sort()

Reported by Bandit.
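
Shelling out to `find` with string concatenation is what triggers B605: a path containing shell metacharacters would be interpreted by the shell. A shell-free sketch of the same recursive scan, using only the standard library:

    import glob
    import os

    def find_all_glsls(path):
        # Enumerate *.glsl files recursively without spawning a shell.
        return sorted(glob.glob(os.path.join(path, "**", "*.glsl"), recursive=True))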

Unable to import 'tools.codegen.code_template'
Error

Line: 6 Column: 1

import argparse
import sys
import os
from tools.codegen.code_template import CodeTemplate

H_NAME = "glsl.h"
CPP_NAME = "glsl.cpp"
DEFAULT_ENV = {"precision": "highp"}


Reported by Pylint.
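
"Unable to import" findings like this one (and the later ones for hypothesis, torch, and scipy) usually mean Pylint was not run from an environment where the dependency is installed, or the repository root is not on sys.path; the import is fine at runtime. Running Pylint from the project environment fixes it, or the line can be annotated:

    from tools.codegen.code_template import CodeTemplate  # pylint: disable=import-error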

Unused variable 'srcPath'
Error

Line: 43 Column: 9

        cpp += "const char* " + name + " = \n"

        codeTemplate = CodeTemplate.from_file(templateGlslPath)
        srcPath = tmpDirPath + "/" + name + ".glsl"
        content = codeTemplate.substitute(env)

        lines = content.split("\n")
        for l in lines:
            if (len(l) < 1):

Reported by Pylint.

Unused argument 'argv'
Error

Line: 74 Column: 10

    return d


def main(argv):
    parser = argparse.ArgumentParser(description='Generate glsl.cpp and glsl.h containing glsl sources')
    parser.add_argument(
        '-i',
        '--glsl-path',
        help='path to directory with glsl to process',

Reported by Pylint.

Probable insecure usage of temp file/directory.
Security

Line: 91
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b108_hardcoded_tmp_directory.html

        '-t',
        '--tmp-dir-path',
        required=True,
        help='/tmp')
    parser.add_argument(
        "--env",
        metavar="KEY=VALUE",
        nargs='*',
        help="Set a number of key-value pairs")

Reported by Bandit.
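
Bandit appears to be matching the literal '/tmp' in the argparse help string here, so this is likely a false positive; the actual directory comes from the caller. If the script ever needed a default scratch path, `tempfile` would be the safer choice:

    import tempfile

    # A unique, user-owned scratch directory instead of a shared hardcoded path.
    tmp_dir_path = tempfile.mkdtemp(prefix="gen_vulkan_glsl_")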

Missing module docstring
Error

Line: 1 Column: 1

#!/usr/bin/env python3

import argparse
import sys
import os
from tools.codegen.code_template import CodeTemplate

H_NAME = "glsl.h"
CPP_NAME = "glsl.cpp"

Reported by Pylint.

Missing function or method docstring
Error

Line: 12 Column: 1

CPP_NAME = "glsl.cpp"
DEFAULT_ENV = {"precision": "highp"}

def findAllGlsls(path):
    cmd = "find " + path + " -name \"*.glsl\""
    vexs = os.popen(cmd).read().split('\n')
    output = []
    for f in vexs:
        if len(f) > 1:

Reported by Pylint.

Function name "findAllGlsls" doesn't conform to snake_case naming style
Error

Line: 12 Column: 1

CPP_NAME = "glsl.cpp"
DEFAULT_ENV = {"precision": "highp"}

def findAllGlsls(path):
    cmd = "find " + path + " -name \"*.glsl\""
    vexs = os.popen(cmd).read().split('\n')
    output = []
    for f in vexs:
        if len(f) > 1:

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 16 Column: 9

    cmd = "find " + path + " -name \"*.glsl\""
    vexs = os.popen(cmd).read().split('\n')
    output = []
    for f in vexs:
        if len(f) > 1:
            output.append(f)
    output.sort()
    return output


Reported by Pylint.

Argument name "filePath" doesn't conform to snake_case naming style
Error

Line: 22 Column: 1

    output.sort()
    return output

def getName(filePath):
    return os.path.basename(filePath).replace("/", "_").replace(".", "_")

def genCppH(hFilePath, cppFilePath, templateGlslPaths, tmpDirPath, env):
    print("hFilePath:{}".format(hFilePath))
    print("cppFilePath:{}".format(cppFilePath))

Reported by Pylint.
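
The C0103 naming complaints in this file all follow the same pattern; a sketch of the snake_case spellings Pylint expects (call sites would need the same renames):

    def find_all_glsls(path): ...      # was findAllGlsls

    def get_name(file_path):           # was getName(filePath)
        return os.path.basename(file_path).replace("/", "_").replace(".", "_")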

torch/nn/parallel/comm.py
34 issues
Module 'torch' has no 'empty_like' member
Error

Line: 95 Column: 18

        return inputs[0]

    if nccl.is_available(inputs):
        result = torch.empty_like(inputs[root_index])
        nccl.reduce(inputs, output=result, root=root_index)
    else:
        destination_device = torch.device(inputs[root_index].device.type, destination)
        nonroot = [t for i, t in enumerate(inputs) if i != root_index]
        # make a new tensor w/o clone

Reported by Pylint.
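
`torch.empty_like` and `torch.device` do exist at runtime; Pylint simply cannot introspect members defined in torch's C extension, so these no-member reports are false positives. A targeted suppression keeps the check enabled elsewhere:

    result = torch.empty_like(inputs[root_index])  # pylint: disable=no-member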

Module 'torch' has no 'device' member
Error

Line: 98 Column: 30

        result = torch.empty_like(inputs[root_index])
        nccl.reduce(inputs, output=result, root=root_index)
    else:
        destination_device = torch.device(inputs[root_index].device.type, destination)
        nonroot = [t for i, t in enumerate(inputs) if i != root_index]
        # make a new tensor w/o clone
        result = inputs[root_index] + nonroot[0].to(device=destination_device, non_blocking=True)
        for other in nonroot[1:]:
            result.add_(other.to(device=destination_device, non_blocking=True))

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 36 Column: 16

                          "devices={} and out={}".format(devices, out))
    if devices is not None:
        devices = [_get_device_index(d) for d in devices]
        return torch._C._broadcast(tensor, devices)
    else:
        return torch._C._broadcast_out(tensor, out)


def broadcast_coalesced(tensors, devices, buffer_size=10485760):

Reported by Pylint.
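
The `torch._C` calls here (and in the matching reports below, including those in ddp_zero_hook.py) are torch code invoking its own C bindings, so the protected-access warnings are expected rather than bugs. If they are to be silenced, an inline pragma scoped to the line is the least invasive option:

    return torch._C._broadcast(tensor, devices)  # pylint: disable=protected-access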

Access to a protected member _broadcast of a client class
Error

Line: 36 Column: 16

                          "devices={} and out={}".format(devices, out))
    if devices is not None:
        devices = [_get_device_index(d) for d in devices]
        return torch._C._broadcast(tensor, devices)
    else:
        return torch._C._broadcast_out(tensor, out)


def broadcast_coalesced(tensors, devices, buffer_size=10485760):

Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 38 Column: 16

        devices = [_get_device_index(d) for d in devices]
        return torch._C._broadcast(tensor, devices)
    else:
        return torch._C._broadcast_out(tensor, out)


def broadcast_coalesced(tensors, devices, buffer_size=10485760):
    """Broadcasts a sequence tensors to the specified GPUs.
    Small tensors are first coalesced into a buffer to reduce the number

Reported by Pylint.

Access to a protected member _broadcast_out of a client class
Error

Line: 38 Column: 16

        devices = [_get_device_index(d) for d in devices]
        return torch._C._broadcast(tensor, devices)
    else:
        return torch._C._broadcast_out(tensor, out)


def broadcast_coalesced(tensors, devices, buffer_size=10485760):
    """Broadcasts a sequence tensors to the specified GPUs.
    Small tensors are first coalesced into a buffer to reduce the number

Reported by Pylint.

Access to a protected member _broadcast_coalesced of a client class
Error

Line: 58 Column: 12

                  """
    devices = [_get_device_index(d) for d in devices]
    tensors = [_handle_complex(t) for t in tensors]
    return torch._C._broadcast_coalesced(tensors, devices, buffer_size)


def reduce_add(inputs, destination=None):
    """Sums tensors from multiple GPUs.


Reported by Pylint.

Access to a protected member _C of a client class
Error

Line: 58 Column: 12

                  """
    devices = [_get_device_index(d) for d in devices]
    tensors = [_handle_complex(t) for t in tensors]
    return torch._C._broadcast_coalesced(tensors, devices, buffer_size)


def reduce_add(inputs, destination=None):
    """Sums tensors from multiple GPUs.


Reported by Pylint.

TODO: When `len(inputs) == 1` and all inputs are on `destination`, just
Error

Line: 124 Column: 3

        A tuple of tensors containing an elementwise sum of each group of
        inputs, placed on the ``destination`` device.
    """
    # TODO: When `len(inputs) == 1` and all inputs are on `destination`, just
    #       return `inputs`.
    dense_tensors: List[List] = [[] for _ in inputs]  # shape (num_gpus, num_tensors)
    output = []
    ref_order = []
    # process sparse ones first since they may have different sizes on different gpus

Reported by Pylint.

Access to a protected member _scatter of a client class
Error

Line: 189 Column: 22

    tensor = _handle_complex(tensor)
    if out is None:
        devices = [_get_device_index(d) for d in devices]
        return tuple(torch._C._scatter(tensor, devices, chunk_sizes, dim, streams))
    else:
        if devices is not None:
            raise RuntimeError(
                "'devices' must not be specified when 'out' is specified, but "
                "got devices={}".format(devices))

Reported by Pylint.

torch/distributed/elastic/rendezvous/dynamic_rendezvous.py
34 issues
Attempted relative import beyond top-level package
Error

Line: 27 Column: 1

    construct_and_record_rdzv_event,
)

from .api import (
    RendezvousClosedError,
    RendezvousError,
    RendezvousHandler,
    RendezvousParameters,
    RendezvousStateError,

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 35 Column: 1

    RendezvousStateError,
    RendezvousTimeoutError,
)
from .utils import _delay, _PeriodicTimer

log = logging.getLogger(__name__)


def get_method_name(depth=2):

Reported by Pylint.

Pickle and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue.
Security blacklist

Line: 414
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle

        if state_bits is not None:
            try:
                self._state = pickle.loads(state_bits)
            except pickle.PickleError as exc:
                raise RendezvousStateError(
                    "The rendezvous state is corrupt. See inner exception for details."
                ) from exc
        else:

Reported by Bandit.
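
Whether this is a real issue depends on who can write to the rendezvous backend: the pickled state is produced and consumed by the rendezvous participants themselves. If that store is considered trusted, the finding can be acknowledged in place; otherwise the state would need a serializer that is safe on untrusted input:

    # Assumes the rendezvous backend is only writable by trusted peers.
    self._state = pickle.loads(state_bits)  # nosec B301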

Access to a protected member _keep_alive of a client class
Error

Line: 1133 Column: 13

    def _keep_alive_weak(weak_self) -> None:
        self = weak_self()
        if self is not None:
            self._keep_alive()

    def _keep_alive(self) -> None:
        self._heartbeat_lock.acquire()

        op = _RendezvousKeepAliveOp()

Reported by Pylint.

Too many lines in module (1248/1000)
Error

Line: 1 Column: 1

# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import inspect
import logging
import os

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import inspect
import logging
import os

Reported by Pylint.

Consider possible security implications associated with pickle module.
Security blacklist

Line: 10
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle

import inspect
import logging
import os
import pickle
import socket
import threading
import time
import weakref
from abc import ABC, abstractmethod

Reported by Bandit.

Missing function or method docstring
Error

Line: 40 Column: 1

log = logging.getLogger(__name__)


def get_method_name(depth=2):
    if len(inspect.stack()) > depth:
        return inspect.stack()[depth].function
    return "no_method_name"



Reported by Pylint.
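
The missing-docstring findings are mechanical to fix; for example:

    def get_method_name(depth=2):
        """Return the function name ``depth`` frames up the call stack."""
        if len(inspect.stack()) > depth:
            return inspect.stack()[depth].function
        return "no_method_name"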

Too few public methods (1/2)
Error

Line: 231 Column: 1

        return f"{self.fqdn}_{self.pid}_{self.local_id}"


class _NodeDescGenerator:
    """Generates node descriptors.

    A node descriptor is a combination of an FQDN, a process id, and an auto-
    incremented integer that uniquely identifies a node in the rendezvous.
    """

Reported by Pylint.

Missing function or method docstring
Error

Line: 247 Column: 5

        # An integer that is incremented with each call to generate().
        self._local_id = 0

    def generate(self) -> _NodeDesc:
        # This method can be called by multiple threads concurrently; therefore,
        # we must increment the integer atomically.
        with self._lock:
            local_id = self._local_id


Reported by Pylint.

caffe2/python/operator_test/batch_sparse_to_dense_op_test.py
34 issues
Unable to import 'hypothesis'
Error

Line: 9 Column: 1

from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestBatchSparseToDense(serial.SerializedTestCase):

Reported by Pylint.

Unable to import 'hypothesis.strategies'
Error

Line: 10 Column: 1

import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings
import hypothesis.strategies as st
import numpy as np


class TestBatchSparseToDense(serial.SerializedTestCase):


Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1





from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given, settings

Reported by Pylint.

Missing class docstring
Error

Line: 14 Column: 1

import numpy as np


class TestBatchSparseToDense(serial.SerializedTestCase):

    @given(
        batch_size=st.integers(5, 10),
        dense_last_dim=st.integers(5, 10),
        default_value=st.floats(min_value=2.0, max_value=3.0),

Reported by Pylint.

Missing function or method docstring
Error

Line: 22 Column: 5

        default_value=st.floats(min_value=2.0, max_value=3.0),
        **hu.gcs
    )
    @settings(deadline=None)
    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()

Reported by Pylint.

Argument name "dc" doesn't conform to snake_case naming style
Error

Line: 22 Column: 5

        default_value=st.floats(min_value=2.0, max_value=3.0),
        **hu.gcs
    )
    @settings(deadline=None)
    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()

Reported by Pylint.

Too many local variables (16/15)
Error

Line: 22 Column: 5

        default_value=st.floats(min_value=2.0, max_value=3.0),
        **hu.gcs
    )
    @settings(deadline=None)
    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()

Reported by Pylint.

Too many arguments (6/5)
Error

Line: 22 Column: 5

        default_value=st.floats(min_value=2.0, max_value=3.0),
        **hu.gcs
    )
    @settings(deadline=None)
    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()

Reported by Pylint.

Argument name "gc" doesn't conform to snake_case naming style
Error

Line: 22 Column: 5

        default_value=st.floats(min_value=2.0, max_value=3.0),
        **hu.gcs
    )
    @settings(deadline=None)
    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()

Reported by Pylint.

Variable name "L" doesn't conform to snake_case naming style
Error

Line: 26 Column: 9

    def test_batch_sparse_to_dense(
        self, batch_size, dense_last_dim, default_value, gc, dc
    ):
        L = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
        num_data = L.sum()
        # The following logic ensure that indices in each batch will not be duplicated
        I = np.array([]).astype(np.int32)
        for l in L:
            I_l = np.random.choice(dense_last_dim, l, replace=False)

Reported by Pylint.
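
The single-letter names `L`, `I`, and `I_l` are what trip C0103 here; a sketch of snake_case replacements (every later use in the test would need the same rename):

    lengths = np.random.randint(1, dense_last_dim + 1, size=(batch_size))
    num_data = lengths.sum()
    # The following logic ensures that indices in each batch are not duplicated
    indices = np.array([]).astype(np.int32)
    for length in lengths:
        batch_indices = np.random.choice(dense_last_dim, length, replace=False)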

torch/fx/experimental/unification/unification_tools.py
34 issues
Missing module docstring
Error

Line: 1 Column: 1

import collections
import operator
from functools import reduce
from collections.abc import Mapping

__all__ = ('merge', 'merge_with', 'valmap', 'keymap', 'itemmap',
           'valfilter', 'keyfilter', 'itemfilter',
           'assoc', 'dissoc', 'assoc_in', 'update_in', 'get_in')


Reported by Pylint.

Argument name "f" doesn't conform to snake_case naming style
Error

Line: 10 Column: 1

           'valfilter', 'keyfilter', 'itemfilter',
           'assoc', 'dissoc', 'assoc_in', 'update_in', 'get_in')

def _get_factory(f, kwargs):
    factory = kwargs.pop('factory', dict)
    if kwargs:
        raise TypeError("{}() got an unexpected keyword argument "
                        "'{}'".format(f.__name__, kwargs.popitem()[0]))
    return factory

Reported by Pylint.

Variable name "rv" doesn't conform to snake_case naming style
Error

Line: 36 Column: 5

        dicts = dicts[0]
    factory = _get_factory(merge, kwargs)

    rv = factory()
    for d in dicts:
        rv.update(d)
    return rv



Reported by Pylint.

Variable name "d" doesn't conform to snake_case naming style
Error

Line: 37 Column: 9

    factory = _get_factory(merge, kwargs)

    rv = factory()
    for d in dicts:
        rv.update(d)
    return rv


def merge_with(func, *dicts, **kwargs):

Reported by Pylint.

Variable name "d" doesn't conform to snake_case naming style
Error

Line: 62 Column: 9

    factory = _get_factory(merge_with, kwargs)

    result = factory()
    for d in dicts:
        for k, v in d.items():
            if k not in result:
                result[k] = [v]
            else:
                result[k].append(v)

Reported by Pylint.

Variable name "v" doesn't conform to snake_case naming style
Error

Line: 63 Column: 16

    result = factory()
    for d in dicts:
        for k, v in d.items():
            if k not in result:
                result[k] = [v]
            else:
                result[k].append(v)
    return valmap(func, result, factory)

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 71 Column: 1

    return valmap(func, result, factory)


def valmap(func, d, factory=dict):
    """ Apply function to values of dictionary

    >>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
    >>> valmap(sum, bills)  # doctest: +SKIP
    {'Alice': 65, 'Bob': 45}

Reported by Pylint.

Variable name "rv" doesn't conform to snake_case naming style
Error

Line: 82 Column: 5

        keymap
        itemmap
    """
    rv = factory()
    rv.update(zip(d.keys(), map(func, d.values())))
    return rv


def keymap(func, d, factory=dict):

Reported by Pylint.

Argument name "d" doesn't conform to snake_case naming style
Error

Line: 87 Column: 1

    return rv


def keymap(func, d, factory=dict):
    """ Apply function to keys of dictionary

    >>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
    >>> keymap(str.lower, bills)  # doctest: +SKIP
    {'alice': [20, 15, 30], 'bob': [10, 35]}

Reported by Pylint.

Variable name "rv" doesn't conform to snake_case naming style
Error

Line: 98 Column: 5

        valmap
        itemmap
    """
    rv = factory()
    rv.update(zip(map(func, d.keys()), d.values()))
    return rv


def itemmap(func, d, factory=dict):

Reported by Pylint.

torch/distributed/algorithms/ddp_comm_hooks/ddp_zero_hook.py
34 issues
Access to a protected member _overlap_info of a client class
Error

Line: 38 Column: 20

        This function assumes that appropriate synchronization has taken place
        so that the bucket's gradients can be used.
    """
    overlap_info = zero._overlap_info
    bucket_index = bucket.index()
    assert len(zero.optim.param_groups) == 1, \
        "Overlapping DDP with ZeRO only supports a single parameter group"

    # Construct the `gradients` input for the local optimizer step, which

Reported by Pylint.

Access to a protected member _bucket_assignments_per_rank of a client class
Error

Line: 52 Column: 25

    assert bucket_index in overlap_info.offsets, \
        f"Bucket index {bucket_index} was not assigned to rank {rank}"
    gradients_offset = overlap_info.offsets[bucket_index]
    bucket_assignment = zero._bucket_assignments_per_rank[rank][bucket_index]
    bucket_offset = bucket_assignment.offset
    length = len(bucket_assignment.parameters)
    bucket_gradients = bucket.gradients()[bucket_offset:bucket_offset + length]
    for i, grad in enumerate(bucket_gradients):
        gradients[gradients_offset + i] = grad

Reported by Pylint.

Access to a protected member _local_step of a client class
Error

Line: 59 Column: 5

    for i, grad in enumerate(bucket_gradients):
        gradients[gradients_offset + i] = grad

    zero._local_step(gradients)


def _broadcast_bucket(
    bucket_index: int,
    zero: ZeroRedundancyOptimizer,

Reported by Pylint.

Access to a protected member _overlap_info of a client class
Error

Line: 75 Column: 20

        zero (ZeroRedundancyOptimizer): the calling process's
            :class:`ZeroRedundancyOptimizer` instance.
    """
    overlap_info = zero._overlap_info
    assert len(overlap_info.assigned_ranks_per_bucket) > bucket_index, \
        "`assigned_ranks_per_bucket` is not fully constructed"
    # Sort to ensure the same ordering across ranks
    assigned_ranks = sorted(overlap_info.assigned_ranks_per_bucket[bucket_index])
    assert len(assigned_ranks) > 0, f"Bucket {bucket_index} should be " \

Reported by Pylint.

Access to a protected member _bucket_assignments_per_rank of a client class
Error

Line: 83 Column: 30

    assert len(assigned_ranks) > 0, f"Bucket {bucket_index} should be " \
        "assigned to at least one rank"
    for assigned_rank in assigned_ranks:
        bucket_assignments = zero._bucket_assignments_per_rank[assigned_rank]
        if bucket_index in bucket_assignments:
            overlap_info.broadcast_handles.append(
                dist.broadcast(
                    bucket_assignments[bucket_index].tensor,
                    src=_get_global_rank(zero.process_group, assigned_rank),

Reported by Pylint.

Access to a protected member _overlap_info of a client class
Error

Line: 111 Column: 20

        zero (ZeroRedundancyOptimizer): the calling process's
            :class:`ZeroRedundancyOptimizer` instance.
    """
    overlap_info = zero._overlap_info
    bucket_params = bucket.parameters()
    assert len(bucket_params) > 0, "Empty bucket"

    # Save the parameters in the bucket
    overlap_info.params_per_bucket.append(bucket_params)

Reported by Pylint.

Access to a protected member _has_rebuilt_buckets of a client class
Error

Line: 146 Column: 12

        bucket (dist.GradBucket): the current gradient bucket.
    """
    # Proceed as normal until the DDP buckets have been rebuilt
    if not ddp_ref()._has_rebuilt_buckets:  # type: ignore[union-attr]
        assert zero._overlap_info.status == _OverlapStatus.UNINITIALIZED
        return

    bucket_index = bucket.index()
    overlap_info = zero._overlap_info

Reported by Pylint.

Access to a protected member _overlap_info of a client class
Error

Line: 147 Column: 16

                  """
    # Proceed as normal until the DDP buckets have been rebuilt
    if not ddp_ref()._has_rebuilt_buckets:  # type: ignore[union-attr]
        assert zero._overlap_info.status == _OverlapStatus.UNINITIALIZED
        return

    bucket_index = bucket.index()
    overlap_info = zero._overlap_info
    if overlap_info.status == _OverlapStatus.UNINITIALIZED:

Reported by Pylint.

Access to a protected member _overlap_info of a client class
Error

Line: 151 Column: 20

        return

    bucket_index = bucket.index()
    overlap_info = zero._overlap_info
    if overlap_info.status == _OverlapStatus.UNINITIALIZED:
        overlap_info.status = _OverlapStatus.DDP_HAS_REBUILT_BUCKETS

    if overlap_info.status == _OverlapStatus.DDP_HAS_REBUILT_BUCKETS:
        if bucket_index == 0 and len(overlap_info.params_per_bucket) > 0:

Reported by Pylint.

Access to a protected member _init_zero_for_overlap of a client class
Error

Line: 160 Column: 13

            # This corresponds to the first bucket of the backward pass
            # immediately after all information has been saved, so we
            # can perform the delayed ZeRO initialization
            zero._init_zero_for_overlap()
        else:
            # Once DDP buckets have been rebuilt but ZeRO has not been
            # properly initialized yet, save the information needed
            _save_ddp_bucket_info(bucket, zero)


Reported by Pylint.

test/cpp_api_parity/module_impl_check.py
34 issues
Unable to import 'torch'
Error

Line: 23 Column: 1

import pprint
import os

import torch
from cpp_api_parity.utils import TorchNNModuleTestParams, TORCH_NN_COMMON_TEST_HARNESS, \
    compile_cpp_code_inline, set_python_tensors_requires_grad, move_python_tensors_to_device, \
    add_test, compute_cpp_args_construction_stmts_and_forward_arg_symbols, serialize_arg_dict_as_script_module, \
    compute_arg_dict, decorate_test_fn, compute_temp_file_path, generate_error_msg, is_torch_nn_functional_test, \
    try_remove_folder

Reported by Pylint.

Unused argument 'unit_test_class'
Error

Line: 86 Column: 33

}
""")

def run_python_forward_backward(unit_test_class, test_params):
    device = test_params.device
    module = test_params.test_instance.constructor(*test_params.test_instance.constructor_args).to(device)

    inputs = set_python_tensors_requires_grad(move_python_tensors_to_device(
        [arg_value for _, arg_value in test_params.arg_dict['input']], device))

Reported by Pylint.

Access to a protected member _testMethodName of a client class
Error

Line: 252 Column: 90

        def test_fn(self):
            test_forward_backward(
                unit_test_class=self, test_params=unit_test_class.module_test_params_map[self._testMethodName])

        test_fn = decorate_test_fn(
            test_fn=test_fn,
            test_cuda=test_params_dict.get('test_cuda', True),
            has_impl_parity=parity_table['torch::nn'][module_full_name][0] and

Reported by Pylint.

Unused variable 'test_name'
Error

Line: 288 Column: 9

    assert len(unit_test_class.module_test_params_map) > 0
    cpp_sources = TORCH_NN_COMMON_TEST_HARNESS + SAMPLE_MODULE_CPP_SOURCE
    functions = []
    for test_name, test_params in unit_test_class.module_test_params_map.items():
        cpp_sources += generate_test_cpp_sources(
            test_params=test_params, template=TORCH_NN_MODULE_TEST_FORWARD_BACKWARD)
        functions.append('{}_test_forward_backward'.format(test_params.module_variant_name))
    if print_cpp_source:
        print(cpp_sources)

Reported by Pylint.
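
Since the key is never used, iterating over the values alone resolves the warning:

    for test_params in unit_test_class.module_test_params_map.values():
        cpp_sources += generate_test_cpp_sources(
            test_params=test_params, template=TORCH_NN_MODULE_TEST_FORWARD_BACKWARD)
        functions.append('{}_test_forward_backward'.format(test_params.module_variant_name))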

Missing module docstring
Error

Line: 1 Column: 1

# The purpose of this test is to check that we have implementation parity between
# a Python `torch.nn` module and its corresponding C++ `torch::nn` module. Concretely,
# this test does the following:
#
# 1. Get a test params dict from common_nn.py, run forward and backward on the
# Python module created using the test params.
#
# 2. Serialize the Python module's parameters / buffers and its forward input
# arguments, deserialize them in C++ and load them into the C++ module.

Reported by Pylint.

Line too long (113/100)
Error

Line: 26 Column: 1

import torch
from cpp_api_parity.utils import TorchNNModuleTestParams, TORCH_NN_COMMON_TEST_HARNESS, \
    compile_cpp_code_inline, set_python_tensors_requires_grad, move_python_tensors_to_device, \
    add_test, compute_cpp_args_construction_stmts_and_forward_arg_symbols, serialize_arg_dict_as_script_module, \
    compute_arg_dict, decorate_test_fn, compute_temp_file_path, generate_error_msg, is_torch_nn_functional_test, \
    try_remove_folder
from cpp_api_parity.sample_module import SAMPLE_MODULE_CPP_SOURCE

# Expected substitutions:

Reported by Pylint.

Line too long (114/100)
Error

Line: 27 Column: 1

from cpp_api_parity.utils import TorchNNModuleTestParams, TORCH_NN_COMMON_TEST_HARNESS, \
    compile_cpp_code_inline, set_python_tensors_requires_grad, move_python_tensors_to_device, \
    add_test, compute_cpp_args_construction_stmts_and_forward_arg_symbols, serialize_arg_dict_as_script_module, \
    compute_arg_dict, decorate_test_fn, compute_temp_file_path, generate_error_msg, is_torch_nn_functional_test, \
    try_remove_folder
from cpp_api_parity.sample_module import SAMPLE_MODULE_CPP_SOURCE

# Expected substitutions:
#

Reported by Pylint.
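
Both line-too-long findings come from the backslash-continued import. A parenthesized import wraps cleanly within the 100-column limit (names abbreviated here for illustration):

    from cpp_api_parity.utils import (
        TorchNNModuleTestParams,
        TORCH_NN_COMMON_TEST_HARNESS,
        compile_cpp_code_inline,
        set_python_tensors_requires_grad,
        move_python_tensors_to_device,
        # ... remaining names, one per line
    )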

Missing function or method docstring
Error

Line: 86 Column: 1

}
""")

def run_python_forward_backward(unit_test_class, test_params):
    device = test_params.device
    module = test_params.test_instance.constructor(*test_params.test_instance.constructor_args).to(device)

    inputs = set_python_tensors_requires_grad(move_python_tensors_to_device(
        [arg_value for _, arg_value in test_params.arg_dict['input']], device))

Reported by Pylint.

Line too long (106/100)
Error

Line: 88 Column: 1

def run_python_forward_backward(unit_test_class, test_params):
    device = test_params.device
    module = test_params.test_instance.constructor(*test_params.test_instance.constructor_args).to(device)

    inputs = set_python_tensors_requires_grad(move_python_tensors_to_device(
        [arg_value for _, arg_value in test_params.arg_dict['input']], device))
    inputs += move_python_tensors_to_device(
        [arg_value for _, arg_value in test_params.arg_dict['target']], device)

Reported by Pylint.

Missing function or method docstring
Error

Line: 126 Column: 1

    return script_module, python_output, python_grad_dict

def test_forward_backward(unit_test_class, test_params):
    module_variant_name = test_params.module_variant_name
    cpp_tmp_folder = test_params.cpp_tmp_folder
    # Remove the temporary folder if it exists already
    try_remove_folder(cpp_tmp_folder)
    os.mkdir(cpp_tmp_folder)

Reported by Pylint.

caffe2/experiments/python/SparseTransformer.py
34 issues
Unable to import 'scipy.sparse'
Error

Line: 23 Column: 1


from caffe2.python import workspace
import scipy.sparse


class NetDefNode():

    def __init__(self, name, optype, p=None, op=None):

Reported by Pylint.

Unused argument 'id2node'
Error

Line: 73 Column: 22

    return weight_name + "wcsr", weight_name + "iw", weight_name + "jw"


def transFCRelu(cur, id2node, name2id, ops, model):
    """
    Add trans before and after this FC_Prune->(Relu)->FC_Prune chain.
    """
    # 1. add trans before the start of this chain
    # assuming that cur is a FC_Prune, and it has only one input

Reported by Pylint.

Unused argument 'ops'
Error

Line: 73 Column: 40

    return weight_name + "wcsr", weight_name + "iw", weight_name + "jw"


def transFCRelu(cur, id2node, name2id, ops, model):
    """
    Add trans before and after this FC_Prune->(Relu)->FC_Prune chain.
    """
    # 1. add trans before the start of this chain
    # assuming that cur is a FC_Prune, and it has only one input

Reported by Pylint.

Unused argument 'name2id'
Error

Line: 73 Column: 31

    return weight_name + "wcsr", weight_name + "iw", weight_name + "jw"


def transFCRelu(cur, id2node, name2id, ops, model):
    """
    Add trans before and after this FC_Prune->(Relu)->FC_Prune chain.
    """
    # 1. add trans before the start of this chain
    # assuming that cur is a FC_Prune, and it has only one input

Reported by Pylint.
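
When arguments are kept for interface compatibility but deliberately ignored, the conventional fix is an underscore prefix, which Pylint recognizes; positional call sites are unaffected (a sketch; keyword callers would need the new names):

    def transFCRelu(cur, _id2node, _name2id, _ops, model):
        ...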

TODO(wyiming): check whether it is correct here
Error

Line: 81 Column: 3

    # assuming that cur is a FC_Prune, and it has only one input
    pre = cur.prev.itervalues().next()
    # Create a node /op and insert it.
    # TODO(wyiming): check whether it is correct here
    current_blob = model.Transpose(cur.op.input[0], cur.op.input[0] + "_trans")
#     print model.net.Proto()
    trans_op = model.net.Proto().op[-1]
    trans_node = NetDefNode(trans_op.output[0], "Transpose", pre, trans_op)
    trans_node.visited = True

Reported by Pylint.

TODO(wyiming): create a new Op here
Error

Line: 102 Column: 3

            op = cur.op
            wcsr, iw, jw = maskNallocate(op.input[1])
            bias_name = op.input[3]
            # TODO(wyiming): create a new Op here
            current_blob = model.FC_Sparse(current_blob,
                                           cur.op.output[0] + "_Sparse",
                                           wcsr, iw, jw, bias_name)
            sps_op = model.net.Proto().op[-1]
            sps_node = NetDefNode(cur.op.output[0] + "_Sparse",

Reported by Pylint.

Unused variable 'name'
Error

Line: 152 Column: 9

        transFCRelu(cur, id2node, name2id, ops, model)

    cur.visited = True
    for name, n in cur.ops.iteritems():
        Prune2Sparse(n, id2node, name2id, ops, model)


def net2list(net_root):
    """

Reported by Pylint.

TODO: write a non-layer checker and log it
Error

Line: 195 Column: 3

        for input_name in op.input:
            if input_name not in net_name2id:
                # assume that un_occured name are non_layers
                # TODO: write a non-layer checker and log it
                continue
            op_node.insertInput(net_id2node[net_name2id[input_name]])
            if_has_layer_input = True

        if not if_has_layer_input:

Reported by Pylint.

Module name "SparseTransformer" doesn't conform to snake_case naming style
Error

Line: 1 Column: 1

# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software

Reported by Pylint.

caffe2/python/predictor/predictor_exporter.py
34 issues
Module 'caffe2.python._import_c_extension' has no 'Workspace' member
Error

Line: 180 Column: 16

                  """
    """

    ws = ws or workspace.C.Workspace.current
    meta_net_def = metanet_pb2.MetaNetDef()

    # Predict net is the core network that we use.
    utils.AddNet(meta_net_def, predictor_export_meta.predict_init_name(),
                 utils.create_predict_init_net(ws, predictor_export_meta))

Reported by Pylint.

Keyword argument before variable positional arguments list in the definition of save_to_db function
Error

Line: 206 Column: 1

    meta_net_def.modelInfo.version = version


def save_to_db(db_type, db_destination, predictor_export_meta, use_ideep=False,
               *args, **kwargs):
    meta_net_def = get_meta_net_def(predictor_export_meta, db_type=db_type)
    device_type = caffe2_pb2.IDEEP if use_ideep else caffe2_pb2.CPU
    with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU)):
        workspace.FeedBlob(

Reported by Pylint.
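
W1113 fires because `use_ideep=False` precedes `*args`, so a fourth positional argument silently lands in `use_ideep`. One fix (a sketch; it makes the flag keyword-only, so any positional callers would need updating) is to move it after `*args`:

    def save_to_db(db_type, db_destination, predictor_export_meta,
                   *args, use_ideep=False, **kwargs):
        ...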

Unused argument 'args'
Error

Line: 206 Column: 1

    meta_net_def.modelInfo.version = version


def save_to_db(db_type, db_destination, predictor_export_meta, use_ideep=False,
               *args, **kwargs):
    meta_net_def = get_meta_net_def(predictor_export_meta, db_type=db_type)
    device_type = caffe2_pb2.IDEEP if use_ideep else caffe2_pb2.CPU
    with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU)):
        workspace.FeedBlob(

Reported by Pylint.

Unused argument 'args'
Error

Line: 231 Column: 1

    workspace.RunOperatorOnce(op)


def load_from_db(filename, db_type, device_option=None, *args, **kwargs):
    # global_init_net in meta_net_def will load parameters from
    # predictor_constants.PREDICTOR_DBREADER
    create_db = core.CreateOperator(
        'CreateDB', [],
        [core.BlobReference(predictor_constants.PREDICTOR_DBREADER)],

Reported by Pylint.

Keyword argument before variable positional arguments list in the definition of load_from_db function
Error

Line: 231 Column: 1

    workspace.RunOperatorOnce(op)


def load_from_db(filename, db_type, device_option=None, *args, **kwargs):
    # global_init_net in meta_net_def will load parameters from
    # predictor_constants.PREDICTOR_DBREADER
    create_db = core.CreateOperator(
        'CreateDB', [],
        [core.BlobReference(predictor_constants.PREDICTOR_DBREADER)],

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 231 Column: 1

    workspace.RunOperatorOnce(op)


def load_from_db(filename, db_type, device_option=None, *args, **kwargs):
    # global_init_net in meta_net_def will load parameters from
    # predictor_constants.PREDICTOR_DBREADER
    create_db = core.CreateOperator(
        'CreateDB', [],
        [core.BlobReference(predictor_constants.PREDICTOR_DBREADER)],

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

## @package predictor_exporter
# Module caffe2.python.predictor.predictor_exporter





from caffe2.proto import caffe2_pb2
from caffe2.proto import metanet_pb2

Reported by Pylint.

standard import "from builtins import bytes" should be placed before "from caffe2.proto import caffe2_pb2"
Error

Line: 14 Column: 1

from caffe2.python.predictor_constants import predictor_constants
import caffe2.python.predictor.serde as serde
import caffe2.python.predictor.predictor_py_utils as utils
from builtins import bytes
import collections


def get_predictor_exporter_helper(submodelNetName):
    """ constracting stub for the PredictorExportMeta

Reported by Pylint.

standard import "import collections" should be placed before "from caffe2.proto import caffe2_pb2"
Error

Line: 15 Column: 1

import caffe2.python.predictor.serde as serde
import caffe2.python.predictor.predictor_py_utils as utils
from builtins import bytes
import collections


def get_predictor_exporter_helper(submodelNetName):
    """ constracting stub for the PredictorExportMeta
        Only used to construct names to subfields,

Reported by Pylint.
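
Both C0411 findings are resolved by grouping the standard-library imports (Pylint classifies the `builtins` backport as standard here) ahead of the caffe2 ones, e.g.:

    from builtins import bytes
    import collections

    from caffe2.proto import caffe2_pb2
    from caffe2.proto import metanet_pb2
    # ... remaining caffe2 imports unchanged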

Argument name "submodelNetName" doesn't conform to snake_case naming style
Error

Line: 18 Column: 1

import collections


def get_predictor_exporter_helper(submodelNetName):
    """ constracting stub for the PredictorExportMeta
        Only used to construct names to subfields,
        such as calling to predict_net_name
        Args:
            submodelNetName - name of the model

Reported by Pylint.