The following issues were found:
torch/utils/backcompat/__init__.py
6 issues
Line: 7
Column: 1
from torch._C import _get_backcompat_keepdim_warn
class Warning(object):
def __init__(self, setter, getter):
self.setter = setter
self.getter = getter
def set_enabled(self, value):
Reported by Pylint.
Line: 1
Column: 1
from torch._C import _set_backcompat_broadcast_warn
from torch._C import _get_backcompat_broadcast_warn
from torch._C import _set_backcompat_keepdim_warn
from torch._C import _get_backcompat_keepdim_warn
class Warning(object):
def __init__(self, setter, getter):
self.setter = setter
Reported by Pylint.
Line: 7
Column: 1
from torch._C import _get_backcompat_keepdim_warn
class Warning(object):
def __init__(self, setter, getter):
self.setter = setter
self.getter = getter
def set_enabled(self, value):
Reported by Pylint.
Line: 7
Column: 1
from torch._C import _get_backcompat_keepdim_warn
class Warning(object):
def __init__(self, setter, getter):
self.setter = setter
self.getter = getter
def set_enabled(self, value):
Reported by Pylint.
Line: 12
Column: 5
self.setter = setter
self.getter = getter
def set_enabled(self, value):
self.setter(value)
def get_enabled(self):
return self.getter()
Reported by Pylint.
Line: 15
Column: 5
def set_enabled(self, value):
self.setter(value)
def get_enabled(self):
return self.getter()
enabled = property(get_enabled, set_enabled)
broadcast_warning = Warning(_set_backcompat_broadcast_warn, _get_backcompat_broadcast_warn)
Reported by Pylint.
torch/utils/data/datapipes/map/combining.py
5 issues
Line: 1
Column: 1
from torch.utils.data import MapDataPipe, functional_datapipe
from typing import Sized, Tuple, TypeVar
T_co = TypeVar('T_co', covariant=True)
@functional_datapipe('concat')
class ConcatMapDataPipe(MapDataPipe):
r""" :class:`ConcatMapDataPipe`.
Reported by Pylint.
Line: 2
Column: 1
from torch.utils.data import MapDataPipe, functional_datapipe
from typing import Sized, Tuple, TypeVar
T_co = TypeVar('T_co', covariant=True)
@functional_datapipe('concat')
class ConcatMapDataPipe(MapDataPipe):
r""" :class:`ConcatMapDataPipe`.
Reported by Pylint.
Line: 4
Column: 1
from torch.utils.data import MapDataPipe, functional_datapipe
from typing import Sized, Tuple, TypeVar
T_co = TypeVar('T_co', covariant=True)
@functional_datapipe('concat')
class ConcatMapDataPipe(MapDataPipe):
r""" :class:`ConcatMapDataPipe`.
Reported by Pylint.
Line: 35
Column: 13
def __getitem__(self, index) -> T_co:
offset = 0
for dp in self.datapipes:
if index - offset < len(dp):
return dp[index - offset]
else:
offset += len(dp)
raise IndexError("Index {} is out of range.".format(index))
Reported by Pylint.
Line: 36
Column: 13
def __getitem__(self, index) -> T_co:
offset = 0
for dp in self.datapipes:
if index - offset < len(dp):
return dp[index - offset]
else:
offset += len(dp)
raise IndexError("Index {} is out of range.".format(index))
Reported by Pylint.
torch/fx/experimental/unification/__init__.py
5 issues
Line: 2
Column: 1
# type: ignore[attr-defined]
from .core import unify, reify # noqa: F403
from .more import unifiable # noqa: F403
from .variable import var, isvar, vars, variables, Var # noqa: F403
Reported by Pylint.
Line: 3
Column: 1
# type: ignore[attr-defined]
from .core import unify, reify # noqa: F403
from .more import unifiable # noqa: F403
from .variable import var, isvar, vars, variables, Var # noqa: F403
Reported by Pylint.
Line: 4
Column: 1
# type: ignore[attr-defined]
from .core import unify, reify # noqa: F403
from .more import unifiable # noqa: F403
from .variable import var, isvar, vars, variables, Var # noqa: F403
Reported by Pylint.
Line: 4
Column: 1
# type: ignore[attr-defined]
from .core import unify, reify # noqa: F403
from .more import unifiable # noqa: F403
from .variable import var, isvar, vars, variables, Var # noqa: F403
Reported by Pylint.
Line: 1
Column: 1
# type: ignore[attr-defined]
from .core import unify, reify # noqa: F403
from .more import unifiable # noqa: F403
from .variable import var, isvar, vars, variables, Var # noqa: F403
Reported by Pylint.
torch/nn/intrinsic/quantized/_reference/modules/linear_relu.py
5 issues
Line: 14
Column: 19
in_features,
out_features,
bias=True,
dtype=torch.qint8):
super().__init__(in_features, out_features, bias, dtype)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x_dequant = x.dequantize()
weight_dequant = self._qweight.dequantize()
Reported by Pylint.
Line: 23
Column: 18
float_result = F.linear(x_dequant, weight_dequant, self._bias)
float_result = F.relu(float_result, inplace=True)
# NEEDFIX: we don't have dtype in the Linear module APIs right now!
result = torch.quantize_per_tensor(
float_result, self.scale, self.zero_point, torch.quint8)
return result
def _get_name(self):
return "QuantizedLinearReLU(Reference)"
Reported by Pylint.
Line: 24
Column: 56
float_result = F.relu(float_result, inplace=True)
# NEEDFIX: we don't have dtype in the Linear module APIs right now!
result = torch.quantize_per_tensor(
float_result, self.scale, self.zero_point, torch.quint8)
return result
def _get_name(self):
return "QuantizedLinearReLU(Reference)"
Reported by Pylint.
Line: 1
Column: 1
import torch
import torch.nn.intrinsic as nni
import torch.nn.quantized._reference as nnqr
import torch.nn.functional as F
class LinearReLU(nnqr.Linear):
_FLOAT_MODULE = nni.LinearReLU
def __init__(
Reported by Pylint.
Line: 6
Column: 1
import torch.nn.quantized._reference as nnqr
import torch.nn.functional as F
class LinearReLU(nnqr.Linear):
_FLOAT_MODULE = nni.LinearReLU
def __init__(
self,
in_features,
Reported by Pylint.
torch/package/_package_unpickler.py
5 issues
Line: 4
Column: 1
import _compat_pickle
import pickle
from .importer import Importer
class PackageUnpickler(pickle._Unpickler): # type: ignore[name-defined]
"""Package-aware unpickler.
Reported by Pylint.
Line: 7
Column: 24
from .importer import Importer
class PackageUnpickler(pickle._Unpickler): # type: ignore[name-defined]
"""Package-aware unpickler.
This behaves the same as a normal unpickler, except it uses `importer` to
find any global names that it encounters while unpickling.
"""
Reported by Pylint.
Line: 1
Column: 1
import _compat_pickle
import pickle
from .importer import Importer
class PackageUnpickler(pickle._Unpickler): # type: ignore[name-defined]
"""Package-aware unpickler.
Reported by Pylint.
Line: 2
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
import _compat_pickle
import pickle
from .importer import Importer
class PackageUnpickler(pickle._Unpickler): # type: ignore[name-defined]
"""Package-aware unpickler.
Reported by Bandit.
Line: 2
Column: 1
import _compat_pickle
import pickle
from .importer import Importer
class PackageUnpickler(pickle._Unpickler): # type: ignore[name-defined]
"""Package-aware unpickler.
Reported by Pylint.
torch/testing/__init__.py
5 issues
Line: 1
Column: 1
from ._core import * # noqa: F403
from ._asserts import * # noqa: F403
from ._check_kernel_launches import * # noqa: F403
from ._deprecated import * # noqa: F403
Reported by Pylint.
Line: 2
Column: 1
from ._core import * # noqa: F403
from ._asserts import * # noqa: F403
from ._check_kernel_launches import * # noqa: F403
from ._deprecated import * # noqa: F403
Reported by Pylint.
Line: 3
Column: 1
from ._core import * # noqa: F403
from ._asserts import * # noqa: F403
from ._check_kernel_launches import * # noqa: F403
from ._deprecated import * # noqa: F403
Reported by Pylint.
Line: 4
Column: 1
from ._core import * # noqa: F403
from ._asserts import * # noqa: F403
from ._check_kernel_launches import * # noqa: F403
from ._deprecated import * # noqa: F403
Reported by Pylint.
Line: 1
Column: 1
from ._core import * # noqa: F403
from ._asserts import * # noqa: F403
from ._check_kernel_launches import * # noqa: F403
from ._deprecated import * # noqa: F403
Reported by Pylint.
torch/utils/data/datapipes/__init__.py
5 issues
Line: 1
Column: 1
from . import iter
from . import map
Reported by Pylint.
Line: 1
Column: 1
from . import iter
from . import map
Reported by Pylint.
Line: 2
Column: 1
from . import iter
from . import map
Reported by Pylint.
Line: 2
Column: 1
from . import iter
from . import map
Reported by Pylint.
Line: 1
Column: 1
from . import iter
from . import map
Reported by Pylint.
torch/utils/benchmark/op_fuzzers/sparse_binary.py
5 issues
Line: 16
Column: 36
class BinaryOpSparseFuzzer(Fuzzer):
def __init__(self, seed, dtype=torch.float32, cuda=False):
super().__init__(
parameters=[
# Dimensionality of x and y. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
FuzzedParameter(
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import torch
from torch.utils.benchmark import Fuzzer, FuzzedParameter, ParameterAlias, FuzzedSparseTensor
_MIN_DIM_SIZE = 16
_MAX_DIM_SIZE = 16 * 1024 ** 2
_POW_TWO_SIZES = tuple(2 ** i for i in range(
Reported by Pylint.
Line: 15
Column: 1
))
class BinaryOpSparseFuzzer(Fuzzer):
def __init__(self, seed, dtype=torch.float32, cuda=False):
super().__init__(
parameters=[
# Dimensionality of x and y. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
Reported by Pylint.
Line: 20
Column: 1
super().__init__(
parameters=[
# Dimensionality of x and y. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
FuzzedParameter(
name="sparse_dim",
distribution={1: 0.4, 2: 0.4, 3: 0.2},
strict=True
),
Reported by Pylint.
Line: 77
Column: 1
distribution={True: 0.5, False: 0.5},
),
# Repeatable entropy for downstream applications.
FuzzedParameter(name="random_value", minval=0, maxval=2 ** 32 - 1, distribution="uniform"),
],
tensors=[
FuzzedSparseTensor(
name="x",
size=("k0", "k1", "k2"),
Reported by Pylint.
torch/utils/benchmark/op_fuzzers/sparse_unary.py
5 issues
Line: 15
Column: 36
))
class UnaryOpSparseFuzzer(Fuzzer):
def __init__(self, seed, dtype=torch.float32, cuda=False):
super().__init__(
parameters=[
# Sparse dim parameter of x. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
FuzzedParameter(
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import torch
from torch.utils.benchmark import Fuzzer, FuzzedParameter, ParameterAlias, FuzzedSparseTensor
_MIN_DIM_SIZE = 16
_MAX_DIM_SIZE = 16 * 1024 ** 2
_POW_TWO_SIZES = tuple(2 ** i for i in range(
Reported by Pylint.
Line: 14
Column: 1
int(np.log2(_MAX_DIM_SIZE)) + 1,
))
class UnaryOpSparseFuzzer(Fuzzer):
def __init__(self, seed, dtype=torch.float32, cuda=False):
super().__init__(
parameters=[
# Sparse dim parameter of x. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
Reported by Pylint.
Line: 19
Column: 1
super().__init__(
parameters=[
# Sparse dim parameter of x. (e.g. 1D, 2D, or 3D.)
FuzzedParameter("dim_parameter", distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True),
FuzzedParameter(
name="sparse_dim",
distribution={1: 0.4, 2: 0.4, 3: 0.2},
strict=True
),
Reported by Pylint.
Line: 65
Column: 1
name="coalesced",
distribution={True: 0.5, False: 0.5},
),
FuzzedParameter(name="random_value", minval=0, maxval=2 ** 32 - 1, distribution="uniform"),
],
tensors=[
FuzzedSparseTensor(
name="x",
size=("k0", "k1", "k2"),
Reported by Pylint.
torch/nn/modules/utils.py
5 issues
Line: 1
Column: 1
import collections
from itertools import repeat
from typing import List, Dict, Any
def _ntuple(n):
def parse(x):
if isinstance(x, collections.abc.Iterable):
return tuple(x)
Reported by Pylint.
Line: 6
Column: 1
from typing import List, Dict, Any
def _ntuple(n):
def parse(x):
if isinstance(x, collections.abc.Iterable):
return tuple(x)
return tuple(repeat(x, n))
Reported by Pylint.
Line: 7
Column: 5
def _ntuple(n):
def parse(x):
if isinstance(x, collections.abc.Iterable):
return tuple(x)
return tuple(repeat(x, n))
return parse
Reported by Pylint.
Line: 21
Column: 1
_quadruple = _ntuple(4)
def _reverse_repeat_tuple(t, n):
r"""Reverse the order of `t` and repeat each element for `n` times.
This can be used to translate padding arg used by Conv and Pooling modules
to the ones used by `F.pad`.
"""
Reported by Pylint.
Line: 21
Column: 1
_quadruple = _ntuple(4)
def _reverse_repeat_tuple(t, n):
r"""Reverse the order of `t` and repeat each element for `n` times.
This can be used to translate padding arg used by Conv and Pooling modules
to the ones used by `F.pad`.
"""
Reported by Pylint.