The following issues were found:
torch/nn/utils/clip_grad.py
12 issues
Line: 36
Column: 16
max_norm = float(max_norm)
norm_type = float(norm_type)
if len(parameters) == 0:
return torch.tensor(0.)
device = parameters[0].grad.device
if norm_type == inf:
norms = [p.grad.detach().abs().max().to(device) for p in parameters]
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
Reported by Pylint.
Line: 40
Column: 65
device = parameters[0].grad.device
if norm_type == inf:
norms = [p.grad.detach().abs().max().to(device) for p in parameters]
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
raise RuntimeError(
f'The total norm of order {norm_type} for gradients from '
Reported by Pylint.
Line: 40
Column: 55
device = parameters[0].grad.device
if norm_type == inf:
norms = [p.grad.detach().abs().max().to(device) for p in parameters]
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
raise RuntimeError(
f'The total norm of order {norm_type} for gradients from '
Reported by Pylint.
Line: 42
Column: 33
norms = [p.grad.detach().abs().max().to(device) for p in parameters]
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
raise RuntimeError(
f'The total norm of order {norm_type} for gradients from '
'`parameters` is non-finite, so it cannot be clipped. To disable '
'this error and scale the gradients by the non-finite norm anyway, '
Reported by Pylint.
Line: 43
Column: 31
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
raise RuntimeError(
f'The total norm of order {norm_type} for gradients from '
'`parameters` is non-finite, so it cannot be clipped. To disable '
'this error and scale the gradients by the non-finite norm anyway, '
'set `error_if_nonfinite=False`')
Reported by Pylint.
Line: 53
Column: 25
# Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so
# avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization
# when the gradients do not reside in CPU memory.
clip_coef_clamped = torch.clamp(clip_coef, max=1.0)
for p in parameters:
p.grad.detach().mul_(clip_coef_clamped.to(p.grad.device))
return total_norm
Reported by Pylint.
Line: 1
Column: 1
import warnings
import torch
from torch._six import inf
from typing import Union, Iterable
_tensor_or_tensors = Union[torch.Tensor, Iterable[torch.Tensor]]
def clip_grad_norm_(
Reported by Pylint.
Line: 4
Column: 1
import warnings
import torch
from torch._six import inf
from typing import Union, Iterable
_tensor_or_tensors = Union[torch.Tensor, Iterable[torch.Tensor]]
def clip_grad_norm_(
Reported by Pylint.
Line: 6
Column: 1
from torch._six import inf
from typing import Union, Iterable
_tensor_or_tensors = Union[torch.Tensor, Iterable[torch.Tensor]]
def clip_grad_norm_(
parameters: _tensor_or_tensors, max_norm: float, norm_type: float = 2.0,
error_if_nonfinite: bool = False) -> torch.Tensor:
Reported by Pylint.
Line: 42
Column: 1
norms = [p.grad.detach().abs().max().to(device) for p in parameters]
total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
else:
total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
raise RuntimeError(
f'The total norm of order {norm_type} for gradients from '
'`parameters` is non-finite, so it cannot be clipped. To disable '
'this error and scale the gradients by the non-finite norm anyway, '
Reported by Pylint.
torch/nn/qat/modules/linear.py
12 issues
Line: 31
Column: 23
self.qconfig = qconfig
self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)
def forward(self, input):
return F.linear(input, self.weight_fake_quant(self.weight), self.bias)
@classmethod
def from_float(cls, mod):
r"""Create a qat module from a float module or qparams_dict
Reported by Pylint.
Line: 1
Column: 1
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.intrinsic import LinearReLU
class Linear(nn.Linear):
r"""
A linear module attached with FakeQuantize modules for weight,
used for quantization aware training.
Reported by Pylint.
Line: 23
Column: 5
"""
_FLOAT_MODULE = nn.Linear
def __init__(self, in_features, out_features, bias=True,
qconfig=None, device=None, dtype=None) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__(in_features, out_features, bias, **factory_kwargs)
assert qconfig, 'qconfig must be provided for QAT module'
self.qconfig = qconfig
Reported by Pylint.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
qconfig=None, device=None, dtype=None) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__(in_features, out_features, bias, **factory_kwargs)
assert qconfig, 'qconfig must be provided for QAT module'
self.qconfig = qconfig
self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)
def forward(self, input):
return F.linear(input, self.weight_fake_quant(self.weight), self.bias)
Reported by Bandit.
Line: 41
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, ' qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
Reported by Bandit.
Line: 41
Column: 16
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, ' qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
Reported by Pylint.
Line: 41
Column: 1
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, ' qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
Reported by Pylint.
Line: 43
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"""
assert type(mod) == cls._FLOAT_MODULE, ' qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
qconfig = mod.qconfig
Reported by Bandit.
Line: 44
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert type(mod) == cls._FLOAT_MODULE, ' qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
qconfig = mod.qconfig
qat_linear = cls(mod.in_features, mod.out_features, bias=mod.bias is not None, qconfig=qconfig)
Reported by Bandit.
Line: 45
Column: 12
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == LinearReLU:
mod = mod[0]
qconfig = mod.qconfig
qat_linear = cls(mod.in_features, mod.out_features, bias=mod.bias is not None, qconfig=qconfig)
qat_linear.weight = mod.weight
Reported by Pylint.
torch/utils/benchmark/examples/sparse/fuzzer.py
12 issues
Line: 8
Column: 1
import sys
import torch.utils.benchmark as benchmark_utils
def main():
add_fuzzer = benchmark_utils.Fuzzer(
parameters=[
[
Reported by Pylint.
Line: 60
Column: 9
for i, (tensors, tensor_properties, _) in enumerate(add_fuzzer.take(n=n)):
x = tensors["x"]
y = tensors["y"]
shape = ", ".join(tuple(f'{i:>4}' for i in x.shape))
x_tensor_properties = tensor_properties["x"]
description = "".join([
f"| {shape:<20} | ",
f"{x_tensor_properties['sparsity']:>9.2f} | ",
Reported by Pylint.
Line: 77
Column: 45
description=description,
)
measurements.append(timer.blocked_autorange(min_run_time=0.1))
measurements[-1].metadata = {"nnz": x._nnz()}
print(f"\r{i + 1} / {n}", end="")
sys.stdout.flush()
print()
# More string munging to make pretty output.
Reported by Pylint.
Line: 10
Column: 1
import torch.utils.benchmark as benchmark_utils
def main():
add_fuzzer = benchmark_utils.Fuzzer(
parameters=[
[
benchmark_utils.FuzzedParameter(
name=f"k{i}",
Reported by Pylint.
Line: 10
Column: 1
import torch.utils.benchmark as benchmark_utils
def main():
add_fuzzer = benchmark_utils.Fuzzer(
parameters=[
[
benchmark_utils.FuzzedParameter(
name=f"k{i}",
Reported by Pylint.
Line: 42
Column: 26
[
benchmark_utils.FuzzedSparseTensor(
name=name,
size=tuple([f"k{i}" for i in range(3)]),
min_elements=64 * 1024,
max_elements=128 * 1024,
sparse_dim="sparse_dim",
density="density",
dim_parameter="dim_parameter",
Reported by Pylint.
Line: 55
Column: 5
seed=0,
)
n = 100
measurements = []
for i, (tensors, tensor_properties, _) in enumerate(add_fuzzer.take(n=n)):
x = tensors["x"]
y = tensors["y"]
Reported by Pylint.
Line: 59
Column: 9
measurements = []
for i, (tensors, tensor_properties, _) in enumerate(add_fuzzer.take(n=n)):
x = tensors["x"]
y = tensors["y"]
shape = ", ".join(tuple(f'{i:>4}' for i in x.shape))
x_tensor_properties = tensor_properties["x"]
description = "".join([
f"| {shape:<20} | ",
Reported by Pylint.
Line: 60
Column: 9
for i, (tensors, tensor_properties, _) in enumerate(add_fuzzer.take(n=n)):
x = tensors["x"]
y = tensors["y"]
shape = ", ".join(tuple(f'{i:>4}' for i in x.shape))
x_tensor_properties = tensor_properties["x"]
description = "".join([
f"| {shape:<20} | ",
f"{x_tensor_properties['sparsity']:>9.2f} | ",
Reported by Pylint.
Line: 85
Column: 5
# More string munging to make pretty output.
print(f"Average attemts per valid config: {1. / (1. - add_fuzzer.rejection_rate):.1f}")
def time_fn(m):
return m.mean / m.metadata["nnz"]
measurements.sort(key=time_fn)
template = f"{{:>6}}{' ' * 16} Shape{' ' * 17}\
Reported by Pylint.
torch/utils/benchmark/examples/compare.py
12 issues
Line: 60
Column: 22
stmt=stmt,
globals={
"torch": torch if branch == "master" else FauxTorch(torch, overhead_ns),
"x": torch.ones((size, 4)),
"y": torch.ones((1, 4)),
"zero": torch.zeros(()),
},
label=label,
sub_label=sub_label,
Reported by Pylint.
Line: 61
Column: 22
globals={
"torch": torch if branch == "master" else FauxTorch(torch, overhead_ns),
"x": torch.ones((size, 4)),
"y": torch.ones((1, 4)),
"zero": torch.zeros(()),
},
label=label,
sub_label=sub_label,
description=f"size: {size}",
Reported by Pylint.
Line: 62
Column: 25
"torch": torch if branch == "master" else FauxTorch(torch, overhead_ns),
"x": torch.ones((size, 4)),
"y": torch.ones((1, 4)),
"zero": torch.zeros(()),
},
label=label,
sub_label=sub_label,
description=f"size: {size}",
env=branch,
Reported by Pylint.
Line: 85
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle
print()
comparison = benchmark_utils.Compare([
pickle.loads(i) for i in serialized_results
])
print("== Unformatted " + "=" * 80 + "\n" + "/" * 95 + "\n")
comparison.print()
Reported by Bandit.
Line: 6
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
$ python -m examples.compare
"""
import pickle
import sys
import time
import torch
Reported by Bandit.
Line: 15
Column: 1
import torch.utils.benchmark as benchmark_utils
class FauxTorch(object):
"""Emulate different versions of pytorch.
In normal circumstances this would be done with multiple processes
writing serialized measurements, but this simplifies that model to
make the example clearer.
Reported by Pylint.
Line: 26
Column: 5
self._real_torch = real_torch
self._extra_ns_per_element = extra_ns_per_element
def extra_overhead(self, result):
# time.sleep has a ~65 us overhead, so only fake a
# per-element overhead if numel is large enough.
numel = int(result.numel())
if numel > 5000:
time.sleep(numel * self._extra_ns_per_element * 1e-9)
Reported by Pylint.
Line: 34
Column: 5
time.sleep(numel * self._extra_ns_per_element * 1e-9)
return result
def add(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.add(*args, **kwargs))
def mul(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.mul(*args, **kwargs))
Reported by Pylint.
Line: 37
Column: 5
def add(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.add(*args, **kwargs))
def mul(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.mul(*args, **kwargs))
def cat(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.cat(*args, **kwargs))
Reported by Pylint.
Line: 40
Column: 5
def mul(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.mul(*args, **kwargs))
def cat(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.cat(*args, **kwargs))
def matmul(self, *args, **kwargs):
return self.extra_overhead(self._real_torch.matmul(*args, **kwargs))
Reported by Pylint.
torch/package/file_structure_representation.py
12 issues
Line: 4
Column: 1
# -*- coding: utf-8 -*-
from typing import Dict, List
from .glob_group import GlobPattern, GlobGroup
class Directory:
"""A file structure representation. Organized as Directory nodes that have lists of
their Directory children. Directories for a package are created by calling
Reported by Pylint.
Line: 4
Column: 1
# -*- coding: utf-8 -*-
from typing import Dict, List
from .glob_group import GlobPattern, GlobGroup
class Directory:
"""A file structure representation. Organized as Directory nodes that have lists of
their Directory children. Directories for a package are created by calling
Reported by Pylint.
Line: 32
Column: 16
dir_name = dirs[0]
if dir_name not in self.children:
self.children[dir_name] = Directory(dir_name, True)
return self.children[dir_name]._get_dir(dirs[1:])
def _add_file(self, file_path: str):
"""Adds a file to a Directory.
Args:
Reported by Pylint.
Line: 42
Column: 9
other paths items are added as directories.
"""
*dirs, file = file_path.split("/")
dir = self._get_dir(dirs)
dir.children[file] = Directory(file, False)
def has_file(self, filename: str) -> bool:
"""Checks if a file is present in a :class:`Directory`.
Reported by Pylint.
Line: 96
Column: 17
for index, key in enumerate(sorted(dir_keys)):
if (index == len(dir_keys) - 1) and len(file_keys) == 0:
self.children[key]._stringify_tree(str_list, preamble, last)
else:
self.children[key]._stringify_tree(str_list, preamble, tee)
for index, file in enumerate(sorted(file_keys)):
pointer = last if (index == len(file_keys) - 1) else tee
str_list.append(f"{preamble}{pointer}{file}\n")
Reported by Pylint.
Line: 98
Column: 17
if (index == len(dir_keys) - 1) and len(file_keys) == 0:
self.children[key]._stringify_tree(str_list, preamble, last)
else:
self.children[key]._stringify_tree(str_list, preamble, tee)
for index, file in enumerate(sorted(file_keys)):
pointer = last if (index == len(file_keys) - 1) else tee
str_list.append(f"{preamble}{pointer}{file}\n")
Reported by Pylint.
Line: 131
Column: 13
top_dir = Directory(filename, True)
for file in file_list:
if glob_pattern.matches(file):
top_dir._add_file(file)
return top_dir
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
from typing import Dict, List
from .glob_group import GlobPattern, GlobGroup
class Directory:
"""A file structure representation. Organized as Directory nodes that have lists of
their Directory children. Directories for a package are created by calling
Reported by Pylint.
Line: 57
Column: 13
child = lineage[0]
grandchildren = lineage[1] if len(lineage) > 1 else None
if child in self.children.keys():
if grandchildren is None:
return True
else:
return self.children[child].has_file(grandchildren)
return False
Reported by Pylint.
Line: 118
Column: 1
file_list (List[str]): List of files to add to the top-level directory.
include (Union[List[str], str]): An optional pattern that limits what is included from the file_list to
files whose name matches the pattern.
exclude (Union[List[str], str]): An optional pattern that excludes files whose name match the pattern.
Returns:
Reported by Pylint.
torch/fx/experimental/unification/multipledispatch/utils.py
12 issues
Line: 103
Column: 14
return d
def typename(type):
"""Get the name of `type`.
Parameters
----------
type : Union[Type, Tuple[Type]]
Returns
Reported by Pylint.
Line: 1
Column: 1
from collections import OrderedDict
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
Reported by Pylint.
Line: 4
Column: 1
from collections import OrderedDict
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
Reported by Pylint.
Line: 12
Column: 1
return True
def expand_tuples(L):
"""
>>> expand_tuples([1, (2, 3)])
[(1, 2), (1, 3)]
>>> expand_tuples([1, 2])
[(1, 2)]
Reported by Pylint.
Line: 19
Column: 5
>>> expand_tuples([1, 2])
[(1, 2)]
"""
if not L:
return [()]
elif not isinstance(L[0], tuple):
rest = expand_tuples(L[1:])
return [(L[0],) + t for t in rest]
else:
Reported by Pylint.
Line: 47
Column: 5
incoming_edges = reverse_dict(edges)
incoming_edges = OrderedDict((k, set(val))
for k, val in incoming_edges.items())
S = OrderedDict.fromkeys(v for v in edges if v not in incoming_edges)
L = []
while S:
n, _ = S.popitem()
L.append(n)
Reported by Pylint.
Line: 48
Column: 5
incoming_edges = OrderedDict((k, set(val))
for k, val in incoming_edges.items())
S = OrderedDict.fromkeys(v for v in edges if v not in incoming_edges)
L = []
while S:
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
Reported by Pylint.
Line: 51
Column: 9
L = []
while S:
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
Reported by Pylint.
Line: 53
Column: 13
while S:
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S[m] = None
if any(incoming_edges.get(v, None) for v in edges):
Reported by Pylint.
Line: 54
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
n, _ = S.popitem()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S[m] = None
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
Reported by Bandit.
torch/special/__init__.py
11 issues
Line: 582
Column: 1
tensor([ 0.9186, 0.8631, -0.0259, -0.1300])
""".format(**common_args))
round = _add_docstr(_special.special_round,
r"""
round(input, *, out=None) -> Tensor
Alias for :func:`torch.round`.
""")
Reported by Pylint.
Line: 1
Column: 1
import sys
import torch
from torch._C import _add_docstr, _special # type: ignore[attr-defined]
from torch._torch_docs import common_args, multi_dim_common
Tensor = torch.Tensor
entr = _add_docstr(_special.special_entr,
Reported by Pylint.
Line: 419
Column: 1
r"""
i0(input, *, out=None) -> Tensor
Computes the zeroth order modified Bessel function of the first kind for each element of :attr:`input`.
.. math::
\text{out}_{i} = I_0(\text{input}_{i}) = \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!)^2}
""" + r"""
Reported by Pylint.
Line: 422
Column: 1
Computes the zeroth order modified Bessel function of the first kind for each element of :attr:`input`.
.. math::
\text{out}_{i} = I_0(\text{input}_{i}) = \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!)^2}
""" + r"""
Args:
input (Tensor): the input tensor
Reported by Pylint.
Line: 441
Column: 1
i0e = _add_docstr(_special.special_i0e,
r"""
i0e(input, *, out=None) -> Tensor
Computes the exponentially scaled zeroth order modified Bessel function of the first kind (as defined below)
for each element of :attr:`input`.
.. math::
\text{out}_{i} = \exp(-|x|) * i0(x) = \exp(-|x|) * \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!)^2}
Reported by Pylint.
Line: 445
Column: 1
for each element of :attr:`input`.
.. math::
\text{out}_{i} = \exp(-|x|) * i0(x) = \exp(-|x|) * \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!)^2}
""" + r"""
Args:
{input}
Reported by Pylint.
Line: 466
Column: 1
for each element of :attr:`input`.
.. math::
\text{out}_{i} = \frac{(\text{input}_{i})}{2} * \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!) * (k+1)!}
""" + r"""
Args:
{input}
Reported by Pylint.
Line: 483
Column: 1
i1e = _add_docstr(_special.special_i1e,
r"""
i1e(input, *, out=None) -> Tensor
Computes the exponentially scaled first order modified Bessel function of the first kind (as defined below)
for each element of :attr:`input`.
.. math::
\text{out}_{i} = \exp(-|x|) * i1(x) =
\exp(-|x|) * \frac{(\text{input}_{i})}{2} * \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!) * (k+1)!}
Reported by Pylint.
Line: 488
Column: 1
.. math::
\text{out}_{i} = \exp(-|x|) * i1(x) =
\exp(-|x|) * \frac{(\text{input}_{i})}{2} * \sum_{k=0}^{\infty} \frac{(\text{input}_{i}^2/4)^k}{(k!) * (k+1)!}
""" + r"""
Args:
{input}
Reported by Pylint.
Line: 655
Column: 1
:math:`p` element-wise, given by
.. math::
\log(\Gamma_{p}(a)) = C + \displaystyle \sum_{i=1}^{p} \log\left(\Gamma\left(a - \frac{i - 1}{2}\right)\right)
where :math:`C = \log(\pi) \times \frac{p (p - 1)}{4}` and :math:`\Gamma(\cdot)` is the Gamma function.
All elements must be greater than :math:`\frac{p - 1}{2}`, otherwise an error would be thrown.
""" + """
Reported by Pylint.
torch/utils/data/datapipes/iter/combinatorics.py
11 issues
Line: 98
Column: 3
return val
def __iter__(self) -> Iterator[T_co]:
# TODO: Buffer is global, should be per __iter__ !!!
for x in self.datapipe:
if len(self._buffer) == self.buffer_size:
yield self.buffer_replace(x)
else:
self._buffer.append(x)
Reported by Pylint.
Line: 1
Column: 1
import random
from torch.utils.data import IterDataPipe, Sampler, SequentialSampler, functional_datapipe
from typing import TypeVar, Type, Iterator, Sized, Optional, Tuple, Dict, List
T_co = TypeVar('T_co', covariant=True)
class SamplerIterDataPipe(IterDataPipe[T_co]):
Reported by Pylint.
Line: 4
Column: 1
import random
from torch.utils.data import IterDataPipe, Sampler, SequentialSampler, functional_datapipe
from typing import TypeVar, Type, Iterator, Sized, Optional, Tuple, Dict, List
T_co = TypeVar('T_co', covariant=True)
class SamplerIterDataPipe(IterDataPipe[T_co]):
Reported by Pylint.
Line: 6
Column: 1
from torch.utils.data import IterDataPipe, Sampler, SequentialSampler, functional_datapipe
from typing import TypeVar, Type, Iterator, Sized, Optional, Tuple, Dict, List
T_co = TypeVar('T_co', covariant=True)
class SamplerIterDataPipe(IterDataPipe[T_co]):
r""" :class:`SamplerIterDataPipe`.
Reported by Pylint.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
sampler_args: Optional[Tuple] = None,
sampler_kwargs: Optional[Dict] = None
) -> None:
assert isinstance(datapipe, Sized), \
"Sampler class requires input datapipe implemented `__len__`"
super().__init__()
self.datapipe = datapipe
self.sampler_args = () if sampler_args is None else sampler_args
self.sampler_kwargs = {} if sampler_kwargs is None else sampler_kwargs
Reported by Bandit.
Line: 34
Column: 1
self.sampler_args = () if sampler_args is None else sampler_args
self.sampler_kwargs = {} if sampler_kwargs is None else sampler_kwargs
# https://github.com/python/mypy/pull/9629 will solve
self.sampler = sampler(data_source=self.datapipe, *self.sampler_args, **self.sampler_kwargs) # type: ignore[misc]
def __iter__(self) -> Iterator[T_co]:
return iter(self.sampler)
def __len__(self) -> int:
Reported by Pylint.
Line: 83
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
unbatch_level: int = 0
) -> None:
super().__init__()
assert buffer_size > 0, "buffer_size should be larger than 0"
if unbatch_level == 0:
self.datapipe = datapipe
else:
self.datapipe = datapipe.unbatch(unbatch_level=unbatch_level)
self.buffer_size = buffer_size
Reported by Bandit.
Line: 91
Column: 5
self.buffer_size = buffer_size
self._buffer = []
def buffer_replace(self, x):
idx = random.randint(0, self.buffer_size - 1)
val = self._buffer[idx]
self._buffer[idx] = x
return val
Reported by Pylint.
Line: 91
Column: 5
self.buffer_size = buffer_size
self._buffer = []
def buffer_replace(self, x):
idx = random.randint(0, self.buffer_size - 1)
val = self._buffer[idx]
self._buffer[idx] = x
return val
Reported by Pylint.
Line: 92
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b311-random
self._buffer = []
def buffer_replace(self, x):
idx = random.randint(0, self.buffer_size - 1)
val = self._buffer[idx]
self._buffer[idx] = x
return val
def __iter__(self) -> Iterator[T_co]:
Reported by Bandit.
torch/utils/data/datapipes/iter/httpreader.py
11 issues
Line: 25
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b310-urllib-urlopen
for furl in self.source_datapipe:
try:
if self.timeout is None:
r = urllib.urlopen(furl)
else:
r = urllib.urlopen(furl, timeout=self.timeout)
yield(furl, r)
except HTTPError as e:
Reported by Bandit.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b310-urllib-urlopen
if self.timeout is None:
r = urllib.urlopen(furl)
else:
r = urllib.urlopen(furl, timeout=self.timeout)
yield(furl, r)
except HTTPError as e:
raise Exception("Could not get the file.\
[HTTP Error] {code}: {reason}."
Reported by Bandit.
Line: 31
Column: 17
yield(furl, r)
except HTTPError as e:
raise Exception("Could not get the file.\
[HTTP Error] {code}: {reason}."
.format(code=e.code, reason=e.reason))
except URLError as e:
raise Exception("Could not get the file at {url}.\
[URL Error] {reason}."
Reported by Pylint.
Line: 35
Column: 17
[HTTP Error] {code}: {reason}."
.format(code=e.code, reason=e.reason))
except URLError as e:
raise Exception("Could not get the file at {url}.\
[URL Error] {reason}."
.format(reason=e.reason, url=furl))
except Exception:
raise
Reported by Pylint.
Line: 38
Column: 13
raise Exception("Could not get the file at {url}.\
[URL Error] {reason}."
.format(reason=e.reason, url=furl))
except Exception:
raise
Reported by Pylint.
Line: 1
Column: 1
from io import IOBase
from typing import Tuple
from urllib.error import HTTPError, URLError
import urllib.request as urllib
from torch.utils.data import IterDataPipe
class HTTPReaderIterDataPipe(IterDataPipe[Tuple[str, IOBase]]):
r""" :class:`HTTPReaderIterDataPipe`
Reported by Pylint.
Line: 8
Column: 1
from torch.utils.data import IterDataPipe
class HTTPReaderIterDataPipe(IterDataPipe[Tuple[str, IOBase]]):
r""" :class:`HTTPReaderIterDataPipe`
Iterable DataPipe to load file url(s) (http url(s) pointing to file(s)),
yield file url and IO stream in a tuple
args:
Reported by Pylint.
Line: 25
Column: 21
for furl in self.source_datapipe:
try:
if self.timeout is None:
r = urllib.urlopen(furl)
else:
r = urllib.urlopen(furl, timeout=self.timeout)
yield(furl, r)
except HTTPError as e:
Reported by Pylint.
Line: 27
Column: 21
if self.timeout is None:
r = urllib.urlopen(furl)
else:
r = urllib.urlopen(furl, timeout=self.timeout)
yield(furl, r)
except HTTPError as e:
raise Exception("Could not get the file.\
[HTTP Error] {code}: {reason}."
Reported by Pylint.
Line: 30
Column: 13
r = urllib.urlopen(furl, timeout=self.timeout)
yield(furl, r)
except HTTPError as e:
raise Exception("Could not get the file.\
[HTTP Error] {code}: {reason}."
.format(code=e.code, reason=e.reason))
except URLError as e:
raise Exception("Could not get the file at {url}.\
Reported by Pylint.
torch/multiprocessing/queue.py
11 issues
Line: 46
Column: 9
if not isinstance(self._reader, ConnectionWrapper):
self._reader: ConnectionWrapper = ConnectionWrapper(self._reader)
self._writer: ConnectionWrapper = ConnectionWrapper(self._writer)
super(SimpleQueue, self)._make_methods() # type: ignore[misc]
Reported by Pylint.
Line: 21
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle
def recv(self):
buf = self.recv_bytes()
return pickle.loads(buf)
def __getattr__(self, name):
if 'conn' in self.__dict__:
return getattr(self.conn, name)
raise AttributeError("'{}' object has no attribute '{}'".format(
Reported by Bandit.
Line: 1
Column: 1
import io
import multiprocessing.queues
from multiprocessing.reduction import ForkingPickler
import pickle
class ConnectionWrapper(object):
"""Proxy class for _multiprocessing.Connection which uses ForkingPickler to
serialize objects"""
Reported by Pylint.
Line: 4
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
import io
import multiprocessing.queues
from multiprocessing.reduction import ForkingPickler
import pickle
class ConnectionWrapper(object):
"""Proxy class for _multiprocessing.Connection which uses ForkingPickler to
serialize objects"""
Reported by Bandit.
Line: 7
Column: 1
import pickle
class ConnectionWrapper(object):
"""Proxy class for _multiprocessing.Connection which uses ForkingPickler to
serialize objects"""
def __init__(self, conn):
self.conn = conn
Reported by Pylint.
Line: 14
Column: 5
def __init__(self, conn):
self.conn = conn
def send(self, obj):
buf = io.BytesIO()
ForkingPickler(buf, pickle.HIGHEST_PROTOCOL).dump(obj)
self.send_bytes(buf.getvalue())
def recv(self):
Reported by Pylint.
Line: 19
Column: 5
ForkingPickler(buf, pickle.HIGHEST_PROTOCOL).dump(obj)
self.send_bytes(buf.getvalue())
def recv(self):
buf = self.recv_bytes()
return pickle.loads(buf)
def __getattr__(self, name):
if 'conn' in self.__dict__:
Reported by Pylint.
Line: 30
Column: 1
type(self).__name__, 'conn'))
class Queue(multiprocessing.queues.Queue):
def __init__(self, *args, **kwargs):
super(Queue, self).__init__(*args, **kwargs)
self._reader: ConnectionWrapper = ConnectionWrapper(self._reader)
self._writer: ConnectionWrapper = ConnectionWrapper(self._writer)
Reported by Pylint.
Line: 33
Column: 9
class Queue(multiprocessing.queues.Queue):
def __init__(self, *args, **kwargs):
super(Queue, self).__init__(*args, **kwargs)
self._reader: ConnectionWrapper = ConnectionWrapper(self._reader)
self._writer: ConnectionWrapper = ConnectionWrapper(self._writer)
self._send = self._writer.send
self._recv = self._reader.recv
Reported by Pylint.
Line: 40
Column: 1
self._recv = self._reader.recv
class SimpleQueue(multiprocessing.queues.SimpleQueue):
def _make_methods(self):
if not isinstance(self._reader, ConnectionWrapper):
self._reader: ConnectionWrapper = ConnectionWrapper(self._reader)
self._writer: ConnectionWrapper = ConnectionWrapper(self._writer)
Reported by Pylint.