The following issues were found:

torch/distributions/normal.py
14 issues
Module 'torch' has no 'Size' member
Error

Line: 47 Column: 27

                  def __init__(self, loc, scale, validate_args=None):
        self.loc, self.scale = broadcast_all(loc, scale)
        if isinstance(loc, Number) and isinstance(scale, Number):
            batch_shape = torch.Size()
        else:
            batch_shape = self.loc.size()
        super(Normal, self).__init__(batch_shape, validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 54 Column: 23

              
    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Normal, _instance)
        batch_shape = torch.Size(batch_shape)
        new.loc = self.loc.expand(batch_shape)
        new.scale = self.scale.expand(batch_shape)
        super(Normal, new).__init__(batch_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 61 Column: 35

                      new._validate_args = self._validate_args
        return new

    def sample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        with torch.no_grad():
            return torch.normal(self.loc.expand(shape), self.scale.expand(shape))

    def rsample(self, sample_shape=torch.Size()):

            

Reported by Pylint.

Module 'torch' has no 'normal' member
Error

Line: 64 Column: 20

                  def sample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        with torch.no_grad():
            return torch.normal(self.loc.expand(shape), self.scale.expand(shape))

    def rsample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        eps = _standard_normal(shape, dtype=self.loc.dtype, device=self.loc.device)
        return self.loc + eps * self.scale

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 66 Column: 36

                      with torch.no_grad():
            return torch.normal(self.loc.expand(shape), self.scale.expand(shape))

    def rsample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        eps = _standard_normal(shape, dtype=self.loc.dtype, device=self.loc.device)
        return self.loc + eps * self.scale

    def log_prob(self, value):

            

Reported by Pylint.

Module 'torch' has no 'erf' member; maybe 'err'?
Error

Line: 82 Column: 27

                  def cdf(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return 0.5 * (1 + torch.erf((value - self.loc) * self.scale.reciprocal() / math.sqrt(2)))

    def icdf(self, value):
        return self.loc + self.scale * torch.erfinv(2 * value - 1) * math.sqrt(2)

    def entropy(self):

            

Reported by Pylint.

Module 'torch' has no 'erfinv' member
Error

Line: 85 Column: 40

                      return 0.5 * (1 + torch.erf((value - self.loc) * self.scale.reciprocal() / math.sqrt(2)))

    def icdf(self, value):
        return self.loc + self.scale * torch.erfinv(2 * value - 1) * math.sqrt(2)

    def entropy(self):
        return 0.5 + 0.5 * math.log(2 * math.pi) + torch.log(self.scale)

    @property

            

Reported by Pylint.

Module 'torch' has no 'log' member
Error

Line: 88 Column: 52

                      return self.loc + self.scale * torch.erfinv(2 * value - 1) * math.sqrt(2)

    def entropy(self):
        return 0.5 + 0.5 * math.log(2 * math.pi) + torch.log(self.scale)

    @property
    def _natural_params(self):
        return (self.loc / self.scale.pow(2), -0.5 * self.scale.pow(2).reciprocal())


            

Reported by Pylint.

Module 'torch' has no 'log' member
Error

Line: 95 Column: 45

                      return (self.loc / self.scale.pow(2), -0.5 * self.scale.pow(2).reciprocal())

    def _log_normalizer(self, x, y):
        return -0.25 * x.pow(2) / y + 0.5 * torch.log(-math.pi / y)

            

Reported by Pylint.

Method 'enumerate_support' is abstract in class 'Distribution' but is not overridden
Error

Line: 11 Column: 1

              from torch.distributions.utils import _standard_normal, broadcast_all


class Normal(ExponentialFamily):
    r"""
    Creates a normal (also called Gaussian) distribution parameterized by
    :attr:`loc` and :attr:`scale`.

    Example::

            

Reported by Pylint.

torch/distributions/independent.py
14 issues
Module 'torch' has no 'Size' member
Error

Line: 51 Column: 23

              
    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Independent, _instance)
        batch_shape = torch.Size(batch_shape)
        new.base_dist = self.base_dist.expand(batch_shape +
                                              self.event_shape[:self.reinterpreted_batch_ndims])
        new.reinterpreted_batch_ndims = self.reinterpreted_batch_ndims
        super(Independent, new).__init__(batch_shape, self.event_shape, validate_args=False)
        new._validate_args = self._validate_args

            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 84 Column: 35

                  def variance(self):
        return self.base_dist.variance

    def sample(self, sample_shape=torch.Size()):
        return self.base_dist.sample(sample_shape)

    def rsample(self, sample_shape=torch.Size()):
        return self.base_dist.rsample(sample_shape)


            

Reported by Pylint.

Module 'torch' has no 'Size' member
Error

Line: 87 Column: 36

                  def sample(self, sample_shape=torch.Size()):
        return self.base_dist.sample(sample_shape)

    def rsample(self, sample_shape=torch.Size()):
        return self.base_dist.rsample(sample_shape)

    def log_prob(self, value):
        log_prob = self.base_dist.log_prob(value)
        return _sum_rightmost(log_prob, self.reinterpreted_batch_ndims)

            

Reported by Pylint.

Method 'cdf' is abstract in class 'Distribution' but is not overridden
Error

Line: 7 Column: 1

              from torch.distributions.utils import _sum_rightmost
from typing import Dict

class Independent(Distribution):
    r"""
    Reinterprets some of the batch dims of a distribution as event dims.

    This is mainly useful for changing the shape of the result of
    :meth:`log_prob`. For example to create a diagonal Normal distribution with

            

Reported by Pylint.

Method 'icdf' is abstract in class 'Distribution' but is not overridden
Error

Line: 7 Column: 1

              from torch.distributions.utils import _sum_rightmost
from typing import Dict

class Independent(Distribution):
    r"""
    Reinterprets some of the batch dims of a distribution as event dims.

    This is mainly useful for changing the shape of the result of
    :meth:`log_prob`. For example to create a diagonal Normal distribution with

            

Reported by Pylint.

Access to a protected member _validate_args of a client class
Error

Line: 56 Column: 9

                                                            self.event_shape[:self.reinterpreted_batch_ndims])
        new.reinterpreted_batch_ndims = self.reinterpreted_batch_ndims
        super(Independent, new).__init__(batch_shape, self.event_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

    @property
    def has_rsample(self):
        return self.base_dist.has_rsample

            

Reported by Pylint.

Method 'support' was expected to be 'property', found it instead as 'method'
Error

Line: 70 Column: 5

                      return self.base_dist.has_enumerate_support

    @constraints.dependent_property
    def support(self):
        result = self.base_dist.support
        if self.reinterpreted_batch_ndims:
            result = constraints.independent(result, self.reinterpreted_batch_ndims)
        return result


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
from torch.distributions import constraints
from torch.distributions.distribution import Distribution
from torch.distributions.utils import _sum_rightmost
from typing import Dict

class Independent(Distribution):
    r"""
    Reinterprets some of the batch dims of a distribution as event dims.

            

Reported by Pylint.

standard import "from typing import Dict" should be placed before "import torch"
Error

Line: 5 Column: 1

              from torch.distributions import constraints
from torch.distributions.distribution import Distribution
from torch.distributions.utils import _sum_rightmost
from typing import Dict

class Independent(Distribution):
    r"""
    Reinterprets some of the batch dims of a distribution as event dims.


            

Reported by Pylint.

Line too long (105/100)
Error

Line: 38 Column: 1

              
    def __init__(self, base_distribution, reinterpreted_batch_ndims, validate_args=None):
        if reinterpreted_batch_ndims > len(base_distribution.batch_shape):
            raise ValueError("Expected reinterpreted_batch_ndims <= len(base_distribution.batch_shape), "
                             "actual {} vs {}".format(reinterpreted_batch_ndims,
                                                      len(base_distribution.batch_shape)))
        shape = base_distribution.batch_shape + base_distribution.event_shape
        event_dim = reinterpreted_batch_ndims + len(base_distribution.event_shape)
        batch_shape = shape[:len(shape) - event_dim]

            

Reported by Pylint.

torch/distributed/_sharded_tensor/__init__.py
14 issues
Unable to import '__init__.api'
Error

Line: 5 Column: 1

              
import torch
from torch.distributed._sharding_spec import ShardingSpec
from .api import (
    Shard,
    ShardedTensor,
    ShardedTensorMetadata,
    load_with_process_group,
)

            

Reported by Pylint.

Module 'torch' has no 'strided' member
Error

Line: 16 Column: 16

                      sharding_spec: ShardingSpec,
        *size,
        dtype=None,
        layout=torch.strided,
        requires_grad=False,
        pin_memory=False,
        memory_format=torch.contiguous_format,
        process_group=None,
        init_rrefs=False):

            

Reported by Pylint.

Module 'torch' has no 'contiguous_format' member
Error

Line: 19 Column: 23

                      layout=torch.strided,
        requires_grad=False,
        pin_memory=False,
        memory_format=torch.contiguous_format,
        process_group=None,
        init_rrefs=False):
    """
    Creates an empty :class:`ShardedTensor`. Needs to be called on all ranks in an SPMD fashion.


            

Reported by Pylint.

Access to a protected member _init_from_local_shards of a client class
Error

Line: 93 Column: 12

                  Returns:
        A :class:`ShardedTensor` object handle on this rank
    """
    return ShardedTensor._init_from_local_shards(
        local_shards,
        sharded_tensor_metadata,
        process_group=process_group,
        init_rrefs=init_rrefs
    )

            

Reported by Pylint.

Unused argument 'local_metadata'
Error

Line: 100 Column: 50

                      init_rrefs=init_rrefs
    )

def state_dict_hook(module, destination, prefix, local_metadata):
    """
    Hook to add ShardedTensor to Module's ``state_dict``. Needs to be
    registered to the Module using
    :meth:`torch.nn.Module._register_state_dict_hook`.
    """

            

Reported by Pylint.

Unused argument 'local_metadata'
Error

Line: 108 Column: 58

                  """
    _recurse_update_dict(module, destination, prefix)

def pre_load_state_dict_hook(module, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    """
    Pre-load state dict hook to add ShardedTensor to the module.
    """
    _recurse_update_module(module, state_dict, prefix)


            

Reported by Pylint.

Unused argument 'error_msgs'
Error

Line: 108 Column: 113

                  """
    _recurse_update_dict(module, destination, prefix)

def pre_load_state_dict_hook(module, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    """
    Pre-load state dict hook to add ShardedTensor to the module.
    """
    _recurse_update_module(module, state_dict, prefix)


            

Reported by Pylint.

Unused argument 'unexpected_keys'
Error

Line: 108 Column: 96

                  """
    _recurse_update_dict(module, destination, prefix)

def pre_load_state_dict_hook(module, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    """
    Pre-load state dict hook to add ShardedTensor to the module.
    """
    _recurse_update_module(module, state_dict, prefix)


            

Reported by Pylint.

Unused argument 'missing_keys'
Error

Line: 108 Column: 82

                  """
    _recurse_update_dict(module, destination, prefix)

def pre_load_state_dict_hook(module, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    """
    Pre-load state dict hook to add ShardedTensor to the module.
    """
    _recurse_update_module(module, state_dict, prefix)


            

Reported by Pylint.

Unused argument 'strict'
Error

Line: 108 Column: 74

                  """
    _recurse_update_dict(module, destination, prefix)

def pre_load_state_dict_hook(module, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    """
    Pre-load state dict hook to add ShardedTensor to the module.
    """
    _recurse_update_module(module, state_dict, prefix)


            

Reported by Pylint.

torch/fx/experimental/fx2trt/converters/activation.py
14 issues
Unable to import 'tensorrt'
Error

Line: 3 Column: 1

              import torch
import numpy as np
import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

              import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(
        input=input_val, type=activation_type)
    layer.name = layer_name

            

Reported by Pylint.

Unused argument 'mod'
Error

Line: 8 Column: 32

              
from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(
        input=input_val, type=activation_type)
    layer.name = layer_name

    if input_val.dynamic_range:

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import numpy as np
import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(

            

Reported by Pylint.

Too many arguments (6/5)
Error

Line: 8 Column: 1

              
from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(
        input=input_val, type=activation_type)
    layer.name = layer_name

    if input_val.dynamic_range:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 8 Column: 1

              
from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(
        input=input_val, type=activation_type)
    layer.name = layer_name

    if input_val.dynamic_range:

            

Reported by Pylint.

Line too long (101/100)
Error

Line: 8 Column: 1

              
from .helper_functions import mark_as_int8_layer

def common_activation(network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name):
    layer = network.add_activation(
        input=input_val, type=activation_type)
    layer.name = layer_name

    if input_val.dynamic_range:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 22 Column: 1

              
@tensorrt_converter(torch.nn.functional.relu)
@tensorrt_converter(torch.nn.modules.activation.ReLU)
def relu(network, submod, args, kwargs, layer_name):
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 24
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

              @tensorrt_converter(torch.nn.modules.activation.ReLU)
def relu(network, submod, args, kwargs, layer_name):
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(f"ReLU received input {input_val} that is not part "
                           "of the TensorRT region!")

            

Reported by Bandit.

Line too long (118/100)
Error

Line: 34 Column: 1

                  def activation_dyn_range_fn(dyn_range):
        return max(0, dyn_range[0]), max(0, dyn_range[1])

    return common_activation(network, submod, input_val, trt.ActivationType.RELU, activation_dyn_range_fn, layer_name)


@tensorrt_converter(torch.nn.modules.activation.Sigmoid)
def sigmoid(network, submod, args, kwargs, layer_name):
    # args/kwargs should have already been normalized to kwargs

            

Reported by Pylint.

torch/csrc/deploy/example/generate_examples.py
14 issues
Unable to import 'torch'
Error

Line: 7 Column: 1

              import argparse
from pathlib import Path

import torch
from torch.package import PackageExporter
from torch.fx import symbolic_trace

try:
    from .examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel

            

Reported by Pylint.

Unable to import 'torch.package'
Error

Line: 8 Column: 1

              from pathlib import Path

import torch
from torch.package import PackageExporter
from torch.fx import symbolic_trace

try:
    from .examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel
except ImportError:

            

Reported by Pylint.

Unable to import 'torch.fx'
Error

Line: 9 Column: 1

              
import torch
from torch.package import PackageExporter
from torch.fx import symbolic_trace

try:
    from .examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel
except ImportError:
    from examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel

            

Reported by Pylint.

Redefining name 'e' from outer scope (line 76)
Error

Line: 25 Column: 54

                  name = 'simple_leaf'
    model = SimpleWithLeaf(5, 10)
    graph_module : torch.fx.GraphModule = symbolic_trace(model)
    with PackageExporter(str(p / (name + "_fx"))) as e:
        e.intern("**")
        e.save_pickle("model", "model.pkl", graph_module)

    model_jit = torch.jit.script(model)
    model_jit.save(str(p / (name + "_jit")))

            

Reported by Pylint.

Redefining name 'e' from outer scope (line 76)
Error

Line: 33 Column: 44

                  model_jit.save(str(p / (name + "_jit")))

def save(name, model, model_jit=None, eg=None, featurestore_meta=None):
    with PackageExporter(str(p / name)) as e:
        e.mock("iopath.**")
        e.intern("**")
        e.save_pickle("model", "model.pkl", model)
        if eg:
            e.save_pickle("model", "example.pkl", eg)

            

Reported by Pylint.

TODO(whc) can this name come from buck somehow,
Error

Line: 40 Column: 3

                      if eg:
            e.save_pickle("model", "example.pkl", eg)
        if featurestore_meta:
            # TODO(whc) can this name come from buck somehow,
            # so it's consistent with predictor_config_constants::METADATA_FILE_NAME()?
            e.save_text("extra_files", "metadata.json", featurestore_meta)

    if model_jit:
        model_jit.save(str(p / (name + "_jit")))

            

Reported by Pylint.

Line too long (106/100)
Error

Line: 12 Column: 1

              from torch.fx import symbolic_trace

try:
    from .examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel
except ImportError:
    from examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel

try:
    from .fx.examples import SimpleWithLeaf

            

Reported by Pylint.

Line too long (105/100)
Error

Line: 14 Column: 1

              try:
    from .examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel
except ImportError:
    from examples import Simple, resnet18, MultiReturn, multi_return_metadata, load_library, BatchedModel

try:
    from .fx.examples import SimpleWithLeaf
except ImportError:
    from fx.examples import SimpleWithLeaf

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 21 Column: 1

              except ImportError:
    from fx.examples import SimpleWithLeaf

def generate_fx_example():
    name = 'simple_leaf'
    model = SimpleWithLeaf(5, 10)
    graph_module : torch.fx.GraphModule = symbolic_trace(model)
    with PackageExporter(str(p / (name + "_fx"))) as e:
        e.intern("**")

            

Reported by Pylint.

Variable name "e" doesn't conform to snake_case naming style
Error

Line: 25 Column: 54

                  name = 'simple_leaf'
    model = SimpleWithLeaf(5, 10)
    graph_module : torch.fx.GraphModule = symbolic_trace(model)
    with PackageExporter(str(p / (name + "_fx"))) as e:
        e.intern("**")
        e.save_pickle("model", "model.pkl", graph_module)

    model_jit = torch.jit.script(model)
    model_jit.save(str(p / (name + "_jit")))

            

Reported by Pylint.

torch/distributed/pipeline/sync/copy.py
14 issues
Attempted relative import beyond top-level package
Error

Line: 16 Column: 1

              import torch
from torch import Tensor

from .stream import AbstractStream, current_stream, get_device, record_stream, use_stream, wait_stream

__all__: List[str] = []


Tensors = Sequence[Tensor]

            

Reported by Pylint.

Parameters differ from overridden 'forward' method
Error

Line: 34 Column: 5

                  """Copies tensors on specific streams."""

    @staticmethod
    # type: ignore[override]
    def forward(ctx: Context, prev_stream: AbstractStream, next_stream: AbstractStream, *input,) -> Tensors:
        ctx.prev_stream = prev_stream
        ctx.next_stream = next_stream

        output = []

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 35 Column: 1

              
    @staticmethod
    # type: ignore[override]
    def forward(ctx: Context, prev_stream: AbstractStream, next_stream: AbstractStream, *input,) -> Tensors:
        ctx.prev_stream = prev_stream
        ctx.next_stream = next_stream

        output = []
        output_stream = current_stream(get_device(next_stream))

            

Reported by Pylint.

Parameters differ from overridden 'forward' method
Error

Line: 90 Column: 5

                  """

    @staticmethod
    # type: ignore[override]
    def forward(ctx: Context, prev_stream: AbstractStream, next_stream: AbstractStream, *input) -> Tensors:
        ctx.prev_stream = prev_stream
        ctx.next_stream = next_stream

        wait_stream(next_stream, prev_stream)

            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 91 Column: 1

              
    @staticmethod
    # type: ignore[override]
    def forward(ctx: Context, prev_stream: AbstractStream, next_stream: AbstractStream, *input) -> Tensors:
        ctx.prev_stream = prev_stream
        ctx.next_stream = next_stream

        wait_stream(next_stream, prev_stream)


            

Reported by Pylint.

Line too long (102/100)
Error

Line: 16 Column: 1

              import torch
from torch import Tensor

from .stream import AbstractStream, current_stream, get_device, record_stream, use_stream, wait_stream

__all__: List[str] = []


Tensors = Sequence[Tensor]

            

Reported by Pylint.

Missing class docstring
Error

Line: 25 Column: 1

              

# Common interface between :class:`Copy` and :class:`Wait`.
class Context:
    prev_stream: AbstractStream
    next_stream: AbstractStream


class Copy(torch.autograd.Function):

            

Reported by Pylint.

Too few public methods (0/2)
Error

Line: 25 Column: 1

              

# Common interface between :class:`Copy` and :class:`Wait`.
class Context:
    prev_stream: AbstractStream
    next_stream: AbstractStream


class Copy(torch.autograd.Function):

            

Reported by Pylint.

Line too long (108/100)
Error

Line: 35 Column: 1

              
    @staticmethod
    # type: ignore[override]
    def forward(ctx: Context, prev_stream: AbstractStream, next_stream: AbstractStream, *input,) -> Tensors:
        ctx.prev_stream = prev_stream
        ctx.next_stream = next_stream

        output = []
        output_stream = current_stream(get_device(next_stream))

            

Reported by Pylint.

Variable name "x" doesn't conform to snake_case naming style
Error

Line: 43 Column: 17

                      output_stream = current_stream(get_device(next_stream))

        with use_stream(prev_stream), use_stream(next_stream):
            for x in input:
                if torch.is_tensor(x):
                    y = x.to(get_device(next_stream), non_blocking=True)
                    output.append(y)

                    # 'prev_stream' is not where 'x' has been allocated.

            

Reported by Pylint.

torch/distributed/elastic/rendezvous/etcd_store.py
14 issues
Unable to import 'etcd'
Error

Line: 13 Column: 1

              from base64 import b64decode, b64encode
from typing import Optional

import etcd  # type: ignore[import]

# pyre-ignore[21]: Could not find name `Store` in `torch.distributed`.
from torch.distributed import Store



            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              # Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import datetime
import random
import time

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 21 Column: 1

              
# Delay (sleep) for a small random amount to reduce CAS failures.
# This does not affect correctness, but will reduce requests to etcd server.
def cas_delay():
    time.sleep(random.uniform(0, 0.1))


# pyre-fixme[11]: Annotation `Store` is not defined as a type.
class EtcdStore(Store):

            

Reported by Pylint.

Standard pseudo-random generators are not suitable for security/cryptographic purposes.
Security blacklist

Line: 22
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b311-random

              # Delay (sleep) for a small random amount to reduce CAS failures.
# This does not affect correctness, but will reduce requests to etcd server.
def cas_delay():
    time.sleep(random.uniform(0, 0.1))


# pyre-fixme[11]: Annotation `Store` is not defined as a type.
class EtcdStore(Store):
    """

            

Reported by Bandit.

Method could be a function
Error

Line: 145 Column: 5

                  # in EtcdStore. Input can be `str` or `bytes`.
    # In case of `str`, utf-8 encoding is assumed.
    #
    def _encode(self, value) -> str:
        if type(value) == bytes:
            return b64encode(value).decode()
        elif type(value) == str:
            return b64encode(value.encode()).decode()
        raise ValueError("Value must be of type str or bytes")

            

Reported by Pylint.

Unnecessary "elif" after "return"
Error

Line: 146 Column: 9

                  # In case of `str`, utf-8 encoding is assumed.
    #
    def _encode(self, value) -> str:
        if type(value) == bytes:
            return b64encode(value).decode()
        elif type(value) == str:
            return b64encode(value.encode()).decode()
        raise ValueError("Value must be of type str or bytes")


            

Reported by Pylint.

Using type() instead of isinstance() for a typecheck.
Error

Line: 146 Column: 12

                  # In case of `str`, utf-8 encoding is assumed.
    #
    def _encode(self, value) -> str:
        if type(value) == bytes:
            return b64encode(value).decode()
        elif type(value) == str:
            return b64encode(value.encode()).decode()
        raise ValueError("Value must be of type str or bytes")


            

Reported by Pylint.

Using type() instead of isinstance() for a typecheck.
Error

Line: 148 Column: 14

                  def _encode(self, value) -> str:
        if type(value) == bytes:
            return b64encode(value).decode()
        elif type(value) == str:
            return b64encode(value.encode()).decode()
        raise ValueError("Value must be of type str or bytes")

    #
    # Decode a base64 string (of type `str` or `bytes`).

            

Reported by Pylint.

Method could be a function
Error

Line: 156 Column: 5

                  # Decode a base64 string (of type `str` or `bytes`).
    # Return type is `bytes`, which is more convenient with the Store interface.
    #
    def _decode(self, value) -> bytes:
        if type(value) == bytes:
            return b64decode(value)
        elif type(value) == str:
            return b64decode(value.encode())
        raise ValueError("Value must be of type str or bytes")

            

Reported by Pylint.

Unnecessary "elif" after "return"
Error

Line: 157 Column: 9

                  # Return type is `bytes`, which is more convenient with the Store interface.
    #
    def _decode(self, value) -> bytes:
        if type(value) == bytes:
            return b64decode(value)
        elif type(value) == str:
            return b64decode(value.encode())
        raise ValueError("Value must be of type str or bytes")


            

Reported by Pylint.

tools/lite_interpreter/gen_selected_mobile_ops_header.py
14 issues
Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load().
Security cryptography

Line: 124
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b506_yaml_load.html

                  print("Loading yaml file: ", model_file_name)
    loaded_model = {}
    with open(model_file_name, "rb") as model_file:
        loaded_model = yaml.load(model_file)


    root_operators_set = set(loaded_model)
    print("Writing header file selected_mobile_ops.h: ", parsed_args.output_file_path)
    write_selected_mobile_ops_with_all_dtypes(

            

Reported by Bandit.

Missing module docstring
Error

Line: 1 Column: 1

              #!/usr/bin/env python3
import argparse
import os
from typing import Set
from tools.codegen.selective_build.selector import SelectiveBuilder
from tools.codegen.code_template import CodeTemplate

import yaml


            

Reported by Pylint.

Constant name "if_condition_template_str" doesn't conform to UPPER_CASE naming style
Error

Line: 10 Column: 1

              
import yaml

if_condition_template_str = """if (kernel_tag_sv.compare("$kernel_tag_name") == 0) {
  return $dtype_checks;
}"""
if_condition_template = CodeTemplate(if_condition_template_str)

selected_kernel_dtypes_h_template_str = """#pragma once

            

Reported by Pylint.

Constant name "selected_kernel_dtypes_h_template_str" doesn't conform to UPPER_CASE naming style
Error

Line: 15 Column: 1

              }"""
if_condition_template = CodeTemplate(if_condition_template_str)

selected_kernel_dtypes_h_template_str = """#pragma once
#include <c10/core/ScalarType.h>
#include <c10/util/string_view.h>
#include <c10/macros/Macros.h>

namespace at {

            

Reported by Pylint.

Constant name "selected_mobile_ops_preamble" doesn't conform to UPPER_CASE naming style
Error

Line: 33 Column: 1

              """
selected_kernel_dtypes_h_template = CodeTemplate(selected_kernel_dtypes_h_template_str)

selected_mobile_ops_preamble = """#pragma once
/**
 * Generated by gen_selected_mobile_ops_header.py
 */

"""

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 40 Column: 1

              
"""

def extract_root_operators(selective_builder: SelectiveBuilder) -> Set[str]:
    ops = []
    for (op_name, op) in selective_builder.operators.items():
        if op.is_root_operator:
            ops.append(op_name)
    return set(ops)

            

Reported by Pylint.

Variable name "op" doesn't conform to snake_case naming style
Error

Line: 42 Column: 19

              
def extract_root_operators(selective_builder: SelectiveBuilder) -> Set[str]:
    ops = []
    for (op_name, op) in selective_builder.operators.items():
        if op.is_root_operator:
            ops.append(op_name)
    return set(ops)

def get_selected_kernel_dtypes_code(

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 47 Column: 1

                          ops.append(op_name)
    return set(ops)

def get_selected_kernel_dtypes_code(
        selective_builder: SelectiveBuilder,
) -> str:
    # See https://www.internalfb.com/intern/paste/P153411698/ for an example of the
    # generated code in case all kernel dtypes are selected and in case some kernel
    # dtypes are selected (i.e. both cases).

            

Reported by Pylint.

Line too long (113/100)
Error

Line: 55 Column: 1

                  # dtypes are selected (i.e. both cases).
    #
    body = "return true;"
    if selective_builder.include_all_operators is False and selective_builder.include_all_kernel_dtypes is False:
        body_parts = []
        for kernel_tag, dtypes in selective_builder.kernel_metadata.items():
            conditions = list(map(lambda x: 'scalar_type == at::ScalarType::' + x, dtypes))
            body_parts.append(
                if_condition_template.substitute(

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 74 Column: 1

              # Write the file selected_mobile_ops.h with optionally:
# 1. The selected root operators
# 2. The selected kernel dtypes
def write_selected_mobile_ops(
        output_file_path: str,
        selective_builder: SelectiveBuilder,
) -> None:
    root_ops = extract_root_operators(selective_builder)
    with open(output_file_path, "wb") as out_file:

            

Reported by Pylint.

tools/code_coverage/package/tool/gcc_coverage.py
14 issues
Attempted relative import beyond top-level package
Error

Line: 7 Column: 1

              from typing import Dict

# gcc is only used in oss
from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time
from .utils import run_cpp_test



            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 8 Column: 1

              
# gcc is only used in oss
from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time
from .utils import run_cpp_test


def update_gzip_dict(gzip_dict: Dict[str, int], file_name: str) -> str:

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 9 Column: 1

              # gcc is only used in oss
from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time
from .utils import run_cpp_test


def update_gzip_dict(gzip_dict: Dict[str, int], file_name: str) -> str:
    file_name = file_name.lower()

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 10 Column: 1

              from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time
from .utils import run_cpp_test


def update_gzip_dict(gzip_dict: Dict[str, int], file_name: str) -> str:
    file_name = file_name.lower()
    gzip_dict[file_name] = gzip_dict.get(file_name, 0) + 1

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import os
import subprocess
import time
from typing import Dict

# gcc is only used in oss
from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time

            

Reported by Pylint.

Consider possible security implications associated with subprocess module.
Security blacklist

Line: 2
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess

              import os
import subprocess
import time
from typing import Dict

# gcc is only used in oss
from ..oss.utils import get_gcda_files, run_oss_python_test
from ..util.setting import JSON_FOLDER_BASE_DIR, TestType
from ..util.utils import print_log, print_time

            

Reported by Bandit.

Missing function or method docstring
Error

Line: 13 Column: 1

              from .utils import run_cpp_test


def update_gzip_dict(gzip_dict: Dict[str, int], file_name: str) -> str:
    file_name = file_name.lower()
    gzip_dict[file_name] = gzip_dict.get(file_name, 0) + 1
    num = gzip_dict[file_name]
    return str(num) + "_" + file_name


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 20 Column: 1

                  return str(num) + "_" + file_name


def run_target(binary_file: str, test_type: TestType) -> None:
    print_log("start run", test_type.value, "test: ", binary_file)
    start_time = time.time()
    assert test_type in {TestType.CPP, TestType.PY}
    if test_type == TestType.CPP:
        run_cpp_test(binary_file)

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 23
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

              def run_target(binary_file: str, test_type: TestType) -> None:
    print_log("start run", test_type.value, "test: ", binary_file)
    start_time = time.time()
    assert test_type in {TestType.CPP, TestType.PY}
    if test_type == TestType.CPP:
        run_cpp_test(binary_file)
    else:
        run_oss_python_test(binary_file)


            

Reported by Bandit.

Missing function or method docstring
Error

Line: 32 Column: 1

                  print_time(" time: ", start_time)


def export() -> None:
    start_time = time.time()
    # collect .gcda files
    gcda_files = get_gcda_files()
    # file name like utils.cpp may have same name in different folder
    gzip_dict: Dict[str, int] = {}

            

Reported by Pylint.

torch/distributed/optim/__init__.py
14 issues
Unable to import '__init__.functional_adagrad'
Error

Line: 10 Column: 1

              """
import torch
from torch import optim
from .functional_adagrad import _FunctionalAdagrad
from .functional_adam import _FunctionalAdam
from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop

            

Reported by Pylint.

Unable to import '__init__.functional_adam'
Error

Line: 11 Column: 1

              import torch
from torch import optim
from .functional_adagrad import _FunctionalAdagrad
from .functional_adam import _FunctionalAdam
from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop

            

Reported by Pylint.

Unable to import '__init__.functional_adamw'
Error

Line: 12 Column: 1

              from torch import optim
from .functional_adagrad import _FunctionalAdagrad
from .functional_adam import _FunctionalAdam
from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax

            

Reported by Pylint.

Unable to import '__init__.functional_sgd'
Error

Line: 13 Column: 1

              from .functional_adagrad import _FunctionalAdagrad
from .functional_adam import _FunctionalAdam
from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax


            

Reported by Pylint.

Unable to import '__init__.functional_adadelta'
Error

Line: 14 Column: 1

              from .functional_adam import _FunctionalAdam
from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax

# dict to map a user passed in optimizer_class to a functional

            

Reported by Pylint.

Unable to import '__init__.functional_rmsprop'
Error

Line: 15 Column: 1

              from .functional_adamw import _FunctionalAdamW
from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax

# dict to map a user passed in optimizer_class to a functional
# optimizer class if we have already defined inside the

            

Reported by Pylint.

Unable to import '__init__.functional_rprop'
Error

Line: 16 Column: 1

              from .functional_sgd import _FunctionalSGD
from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax

# dict to map a user passed in optimizer_class to a functional
# optimizer class if we have already defined inside the
# distributed.optim package, this is so that we hide the

            

Reported by Pylint.

Unable to import '__init__.functional_adamax'
Error

Line: 17 Column: 1

              from .functional_adadelta import _FunctionalAdadelta
from .functional_rmsprop import _FunctionalRMSprop
from .functional_rprop import _FunctionalRprop
from .functional_adamax import _FunctionalAdamax

# dict to map a user passed in optimizer_class to a functional
# optimizer class if we have already defined inside the
# distributed.optim package, this is so that we hide the
# functional optimizer to user and still provide the same API.

            

Reported by Pylint.

Unable to import '__init__.optimizer'
Error

Line: 35 Column: 5

              }

if hasattr(torch._C, '_rpc_init'):
    from .optimizer import DistributedOptimizer

from .post_localSGD_optimizer import PostLocalSGDOptimizer
from .zero_redundancy_optimizer import ZeroRedundancyOptimizer

            

Reported by Pylint.

Unable to import '__init__.post_localSGD_optimizer'
Error

Line: 37 Column: 1

              if hasattr(torch._C, '_rpc_init'):
    from .optimizer import DistributedOptimizer

from .post_localSGD_optimizer import PostLocalSGDOptimizer
from .zero_redundancy_optimizer import ZeroRedundancyOptimizer

            

Reported by Pylint.