The following issues were found
torch/quantization/_equalize.py
20 issues
Line: 84
Column: 23
# producing scaling factors to be applied
weight2_range += 1e-9
scaling_factors = torch.sqrt(weight1_range / weight2_range)
inverse_scaling_factors = torch.reciprocal(scaling_factors)
bias = bias * inverse_scaling_factors
# formatting the scaling (1D) tensors to be applied on the given argument tensors
Reported by Pylint.
Line: 85
Column: 31
# producing scaling factors to be applied
weight2_range += 1e-9
scaling_factors = torch.sqrt(weight1_range / weight2_range)
inverse_scaling_factors = torch.reciprocal(scaling_factors)
bias = bias * inverse_scaling_factors
# formatting the scaling (1D) tensors to be applied on the given argument tensors
# pads axis to (1D) tensors to then be broadcasted
Reported by Pylint.
Line: 96
Column: 23
size2 = [1] * weight2.ndim
size2[input_axis] = weight2.size(input_axis)
scaling_factors = torch.reshape(scaling_factors, size2)
inverse_scaling_factors = torch.reshape(inverse_scaling_factors, size1)
weight1 = weight1 * inverse_scaling_factors
weight2 = weight2 * scaling_factors
Reported by Pylint.
Line: 97
Column: 31
size2[input_axis] = weight2.size(input_axis)
scaling_factors = torch.reshape(scaling_factors, size2)
inverse_scaling_factors = torch.reshape(inverse_scaling_factors, size1)
weight1 = weight1 * inverse_scaling_factors
weight2 = weight2 * scaling_factors
set_module_weight(module1, weight1)
Reported by Pylint.
Line: 157
Column: 20
if curr_modules.keys() != prev_modules.keys():
raise ValueError("The keys to the given mappings must have the same set of names of modules")
summed_norms = torch.tensor(0.)
if None in prev_modules.values():
return False
for name in curr_modules.keys():
curr_weight = get_module_weight(curr_modules[name])
prev_weight = get_module_weight(prev_modules[name])
Reported by Pylint.
Line: 33
Column: 19
else:
return module[0].bias
def max_over_ndim(input, axis_list, keepdim=False):
    ''' Applies 'torch.max' over the given axes
'''
axis_list.sort(reverse=True)
for axis in axis_list:
input, _ = input.max(axis, keepdim)
Reported by Pylint.
Line: 41
Column: 19
input, _ = input.max(axis, keepdim)
return input
def min_over_ndim(input, axis_list, keepdim=False):
    ''' Applies 'torch.min' over the given axes
'''
axis_list.sort(reverse=True)
for axis in axis_list:
input, _ = input.min(axis, keepdim)
Reported by Pylint.
Line: 49
Column: 19
input, _ = input.min(axis, keepdim)
return input
def channel_range(input, axis=0):
''' finds the range of weights associated with a specific channel
'''
size_of_tensor_dim = input.ndim
axis_list = list(range(size_of_tensor_dim))
axis_list.remove(axis)
Reported by Pylint.
Line: 1
Column: 1
import torch
import copy
from typing import Dict, Any
_supported_types = {torch.nn.Conv2d, torch.nn.Linear}
_supported_intrinsic_types = {torch.nn.intrinsic.ConvReLU2d, torch.nn.intrinsic.LinearReLU}
_all_supported_types = _supported_types.union(_supported_intrinsic_types)
def set_module_weight(module, weight) -> None:
Reported by Pylint.
Line: 2
Column: 1
import torch
import copy
from typing import Dict, Any
_supported_types = {torch.nn.Conv2d, torch.nn.Linear}
_supported_intrinsic_types = {torch.nn.intrinsic.ConvReLU2d, torch.nn.intrinsic.LinearReLU}
_all_supported_types = _supported_types.union(_supported_intrinsic_types)
def set_module_weight(module, weight) -> None:
Reported by Pylint.
torch/package/find_file_dependencies.py
20 issues
Line: 5
Column: 1
import sys
from typing import List, Optional, Tuple
from ._importlib import _resolve_name
class _ExtractModuleReferences(ast.NodeVisitor):
"""
Extract the list of global variables a block of code will read and write
Reported by Pylint.
Line: 98
Column: 13
self.references[(name, alias)] = True
else:
self.references[(name, None)] = True
except Exception as e:
return
find_files_source_depends_on = _ExtractModuleReferences.run
Reported by Pylint.
Line: 98
Column: 20
self.references[(name, alias)] = True
else:
self.references[(name, None)] = True
except Exception as e:
return
find_files_source_depends_on = _ExtractModuleReferences.run
Reported by Pylint.
Line: 1
Column: 1
import ast
import sys
from typing import List, Optional, Tuple
from ._importlib import _resolve_name
class _ExtractModuleReferences(ast.NodeVisitor):
"""
Reported by Pylint.
Line: 14
Column: 5
"""
@classmethod
def run(cls, src: str, package: str) -> List[Tuple[str, Optional[str]]]:
visitor = cls(package)
tree = ast.parse(src)
visitor.visit(tree)
return list(visitor.references.keys())
Reported by Pylint.
Line: 30
Column: 5
return _resolve_name(module_name, self.package, level)
return module_name
def visit_Import(self, node):
for alias in node.names:
self.references[(alias.name, None)] = True
def visit_ImportFrom(self, node):
name = self._absmodule(node.module, 0 if node.level is None else node.level)
Reported by Pylint.
Line: 30
Column: 5
return _resolve_name(module_name, self.package, level)
return module_name
def visit_Import(self, node):
for alias in node.names:
self.references[(alias.name, None)] = True
def visit_ImportFrom(self, node):
name = self._absmodule(node.module, 0 if node.level is None else node.level)
Reported by Pylint.
Line: 34
Column: 5
for alias in node.names:
self.references[(alias.name, None)] = True
def visit_ImportFrom(self, node):
name = self._absmodule(node.module, 0 if node.level is None else node.level)
for alias in node.names:
# from my_package import foo
# foo may be a module, so we have to add it to the list of
# potential references, if import of it fails, we will ignore it
Reported by Pylint.
Line: 34
Column: 5
for alias in node.names:
self.references[(alias.name, None)] = True
def visit_ImportFrom(self, node):
name = self._absmodule(node.module, 0 if node.level is None else node.level)
for alias in node.names:
# from my_package import foo
# foo may be a module, so we have to add it to the list of
# potential references, if import of it fails, we will ignore it
Reported by Pylint.
Line: 45
Column: 5
else:
self.references[(name, None)] = True
def _grab_node_int(self, node):
if sys.version_info[:2] < (3, 8):
return node.n
else:
return node.value
Reported by Pylint.
torch/utils/data/datapipes/iter/callable.py
20 issues
Line: 9
Column: 3
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
# own logic there. This globally affects the behavior of the standard library
# pickler for any user who transitively depends on this module!
# Undo this extension to avoid altering the behavior of the pickler globally.
dill.extend(use_dill=False)
DILL_AVAILABLE = True
Reported by Pylint.
Line: 105
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle
def __setstate__(self, state):
(self.datapipe, dill_function, self.args, self.kwargs, self.nesting_level) = state
if DILL_AVAILABLE:
self.fn = dill.loads(dill_function) # type: ignore[assignment]
else:
self.fn = dill_function # type: ignore[assignment]
@functional_datapipe('collate')
Reported by Bandit.
Line: 1
Column: 1
import warnings
import torch.nn as nn
from torch.utils.data import IterDataPipe, _utils, functional_datapipe, DataChunk
from typing import Callable, Dict, Iterator, Optional, Sized, Tuple, TypeVar
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
Reported by Pylint.
Line: 4
Column: 1
import warnings
import torch.nn as nn
from torch.utils.data import IterDataPipe, _utils, functional_datapipe, DataChunk
from typing import Callable, Dict, Iterator, Optional, Sized, Tuple, TypeVar
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
Reported by Pylint.
Line: 7
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
from typing import Callable, Dict, Iterator, Optional, Sized, Tuple, TypeVar
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
# own logic there. This globally affects the behavior of the standard library
# pickler for any user who transitively depends on this module!
# Undo this extension to avoid altering the behavior of the pickler globally.
Reported by Bandit.
Line: 18
Column: 1
except ImportError:
DILL_AVAILABLE = False
T_co = TypeVar('T_co', covariant=True)
# Default function to return each item directly
# In order to keep datapipe picklable, eliminates the usage
# of python lambda function
Reported by Pylint.
Line: 24
Column: 1
# Default function to return each item directly
# In order to keep datapipe picklable, eliminates the usage
# of python lambda function
def default_fn(data):
return data
@functional_datapipe('map')
class MapIterDataPipe(IterDataPipe[T_co]):
Reported by Pylint.
Line: 40
Column: 1
fn: Function called over each item
fn_args: Positional arguments for `fn`
fn_kwargs: Keyword arguments for `fn`
nesting_level: Determines which level the fn gets applied to, by default it applies to the top level (= 0)
This also accepts -1 as input to apply the function to the lowest nesting level. It currently doesn't support
argument < -1.
"""
datapipe: IterDataPipe
fn: Callable
Reported by Pylint.
Line: 41
Column: 1
fn_args: Positional arguments for `fn`
fn_kwargs: Keyword arguments for `fn`
nesting_level: Determines which level the fn gets applied to, by default it applies to the top level (= 0)
This also accepts -1 as input to apply the function to the lowest nesting level. It currently doesn't support
argument < -1.
"""
datapipe: IterDataPipe
fn: Callable
Reported by Pylint.
Line: 47
Column: 5
datapipe: IterDataPipe
fn: Callable
def __init__(self,
datapipe: IterDataPipe,
fn: Callable = default_fn,
fn_args: Optional[Tuple] = None,
fn_kwargs: Optional[Dict] = None,
nesting_level: int = 0,
Reported by Pylint.
torch/optim/adamax.py
20 issues
Line: 2
Column: 1
import torch
from . import _functional as F
from .optimizer import Optimizer
class Adamax(Optimizer):
"""Implements Adamax algorithm (a variant of Adam based on infinity norm).
It has been proposed in `Adam: A Method for Stochastic Optimization`__.
Reported by Pylint.
Line: 3
Column: 1
import torch
from . import _functional as F
from .optimizer import Optimizer
class Adamax(Optimizer):
"""Implements Adamax algorithm (a variant of Adam based on infinity norm).
It has been proposed in `Adam: A Method for Stochastic Optimization`__.
Reported by Pylint.
Line: 78
Column: 74
# State initialization
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
state['exp_inf'] = torch.zeros_like(p, memory_format=torch.preserve_format)
exp_avgs.append(state['exp_avg'])
exp_infs.append(state['exp_inf'])
Reported by Pylint.
Line: 78
Column: 40
# State initialization
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
state['exp_inf'] = torch.zeros_like(p, memory_format=torch.preserve_format)
exp_avgs.append(state['exp_avg'])
exp_infs.append(state['exp_inf'])
Reported by Pylint.
Line: 79
Column: 74
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
state['exp_inf'] = torch.zeros_like(p, memory_format=torch.preserve_format)
exp_avgs.append(state['exp_avg'])
exp_infs.append(state['exp_inf'])
state['step'] += 1
Reported by Pylint.
Line: 79
Column: 40
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
state['exp_inf'] = torch.zeros_like(p, memory_format=torch.preserve_format)
exp_avgs.append(state['exp_avg'])
exp_infs.append(state['exp_inf'])
state['step'] += 1
Reported by Pylint.
Line: 1
Column: 1
import torch
from . import _functional as F
from .optimizer import Optimizer
class Adamax(Optimizer):
"""Implements Adamax algorithm (a variant of Adam based on infinity norm).
It has been proposed in `Adam: A Method for Stochastic Optimization`__.
Reported by Pylint.
Line: 6
Column: 1
from .optimizer import Optimizer
class Adamax(Optimizer):
"""Implements Adamax algorithm (a variant of Adam based on infinity norm).
It has been proposed in `Adam: A Method for Stochastic Optimization`__.
Args:
Reported by Pylint.
Line: 24
Column: 5
__ https://arxiv.org/abs/1412.6980
"""
def __init__(self, params, lr=2e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0):
if not 0.0 <= lr:
raise ValueError("Invalid learning rate: {}".format(lr))
if not 0.0 <= eps:
raise ValueError("Invalid epsilon value: {}".format(eps))
Reported by Pylint.
Line: 24
Column: 5
__ https://arxiv.org/abs/1412.6980
"""
def __init__(self, params, lr=2e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0):
if not 0.0 <= lr:
raise ValueError("Invalid learning rate: {}".format(lr))
if not 0.0 <= eps:
raise ValueError("Invalid epsilon value: {}".format(eps))
Reported by Pylint.
torch/nn/qat/modules/conv.py
20 issues
Line: 36
Column: 23
self.qconfig = qconfig
self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)
def forward(self, input):
return self._conv_forward(input, self.weight_fake_quant(self.weight), self.bias)
@classmethod
def from_float(cls, mod):
r"""Create a qat module from a float module or qparams_dict
Reported by Pylint.
Line: 126
Column: 23
self.qconfig = qconfig
self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)
def forward(self, input):
return self._conv_forward(input, self.weight_fake_quant(self.weight), self.bias)
@classmethod
def from_float(cls, mod):
r"""Create a qat module from a float module or qparams_dict
Reported by Pylint.
Line: 1
Column: 1
import torch
import torch.nn as nn
from torch.nn.intrinsic import ConvReLU2d, ConvReLU3d
class Conv2d(nn.Conv2d):
r"""
A Conv2d module attached with FakeQuantize modules for weight,
used for quantization aware training.
Reported by Pylint.
Line: 23
Column: 5
"""
_FLOAT_MODULE = nn.Conv2d
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1,
bias=True, padding_mode='zeros', qconfig=None,
device=None, dtype=None) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__(in_channels, out_channels, kernel_size,
Reported by Pylint.
Line: 32
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
stride=stride, padding=padding, dilation=dilation,
groups=groups, bias=bias, padding_mode=padding_mode,
**factory_kwargs)
assert qconfig, 'qconfig must be provided for QAT module'
self.qconfig = qconfig
self.weight_fake_quant = qconfig.weight(factory_kwargs=factory_kwargs)
def forward(self, input):
return self._conv_forward(input, self.weight_fake_quant(self.weight), self.bias)
Reported by Bandit.
Line: 46
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, 'qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == ConvReLU2d:
mod = mod[0]
Reported by Bandit.
Line: 46
Column: 16
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, 'qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == ConvReLU2d:
mod = mod[0]
Reported by Pylint.
Line: 46
Column: 1
Args: `mod` a float module, either produced by torch.quantization utilities
or directly from user
"""
assert type(mod) == cls._FLOAT_MODULE, 'qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == ConvReLU2d:
mod = mod[0]
Reported by Pylint.
Line: 48
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"""
assert type(mod) == cls._FLOAT_MODULE, 'qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == ConvReLU2d:
mod = mod[0]
qconfig = mod.qconfig
qat_conv = cls(mod.in_channels, mod.out_channels, mod.kernel_size,
Reported by Bandit.
Line: 49
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert type(mod) == cls._FLOAT_MODULE, 'qat.' + cls.__name__ + '.from_float only works for ' + \
cls._FLOAT_MODULE.__name__
assert hasattr(mod, 'qconfig'), 'Input float module must have qconfig defined'
assert mod.qconfig, 'Input float module must have a valid qconfig'
if type(mod) == ConvReLU2d:
mod = mod[0]
qconfig = mod.qconfig
qat_conv = cls(mod.in_channels, mod.out_channels, mod.kernel_size,
stride=mod.stride, padding=mod.padding, dilation=mod.dilation,
Reported by Bandit.
test/distributed/elastic/utils/util_test.py
20 issues
Line: 11
Column: 1
import unittest
import torch.distributed.elastic.utils.store as store_util
from torch.distributed.elastic.utils.logging import get_logger
from torch.testing._internal.common_utils import run_tests
class TestStore:
Reported by Pylint.
Line: 12
Column: 1
import unittest
import torch.distributed.elastic.utils.store as store_util
from torch.distributed.elastic.utils.logging import get_logger
from torch.testing._internal.common_utils import run_tests
class TestStore:
def get(self, key: str):
Reported by Pylint.
Line: 13
Column: 1
import torch.distributed.elastic.utils.store as store_util
from torch.distributed.elastic.utils.logging import get_logger
from torch.testing._internal.common_utils import run_tests
class TestStore:
def get(self, key: str):
return f"retrieved:{key}"
Reported by Pylint.
Line: 1
Column: 1
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import unittest
Reported by Pylint.
Line: 16
Column: 1
from torch.testing._internal.common_utils import run_tests
class TestStore:
def get(self, key: str):
return f"retrieved:{key}"
class StoreUtilTest(unittest.TestCase):
Reported by Pylint.
Line: 16
Column: 1
from torch.testing._internal.common_utils import run_tests
class TestStore:
def get(self, key: str):
return f"retrieved:{key}"
class StoreUtilTest(unittest.TestCase):
Reported by Pylint.
Line: 17
Column: 5
class TestStore:
def get(self, key: str):
return f"retrieved:{key}"
class StoreUtilTest(unittest.TestCase):
def test_get_data(self):
Reported by Pylint.
Line: 17
Column: 5
class TestStore:
def get(self, key: str):
return f"retrieved:{key}"
class StoreUtilTest(unittest.TestCase):
def test_get_data(self):
Reported by Pylint.
Line: 21
Column: 1
return f"retrieved:{key}"
class StoreUtilTest(unittest.TestCase):
def test_get_data(self):
store = TestStore()
data = store_util.get_all(store, "test/store", 10)
for idx in range(0, 10):
self.assertEqual(f"retrieved:test/store{idx}", data[idx])
Reported by Pylint.
Line: 22
Column: 5
class StoreUtilTest(unittest.TestCase):
def test_get_data(self):
store = TestStore()
data = store_util.get_all(store, "test/store", 10)
for idx in range(0, 10):
self.assertEqual(f"retrieved:test/store{idx}", data[idx])
Reported by Pylint.
docs/caffe2/process.py
20 issues
Line: 10
Column: 38
import shutil
# Module caffe2...caffe2.python.control_test
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
Reported by Pylint.
Line: 10
Column: 26
import shutil
# Module caffe2...caffe2.python.control_test
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
Reported by Pylint.
Line: 1
Column: 1
#!/usr/bin/env python3
## @package process
# Module doxygen.process
# Script to insert preamble for doxygen and regen API docs
import os
import shutil
# Module caffe2...caffe2.python.control_test
Reported by Pylint.
Line: 10
Column: 1
import shutil
# Module caffe2...caffe2.python.control_test
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
Reported by Pylint.
Line: 11
Column: 37
# Module caffe2...caffe2.python.control_test
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
f2.write(docs)
Reported by Pylint.
Line: 12
Column: 9
# Module caffe2...caffe2.python.control_test
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
f2.write(docs)
f2.write(f.read())
Reported by Pylint.
Line: 13
Column: 1
def insert(originalfile, first_line, description):
with open(originalfile, 'r') as f:
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
f2.write(docs)
f2.write(f.read())
os.rename('newfile.txt', originalfile)
Reported by Pylint.
Line: 15
Column: 46
f1 = f.readline()
if(f1.find(first_line) < 0):
docs = first_line + description + f1
with open('newfile.txt', 'w') as f2:
f2.write(docs)
f2.write(f.read())
os.rename('newfile.txt', originalfile)
else:
print('already inserted')
Reported by Pylint.
Line: 24
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b607_start_process_with_partial_path.html
# move up from /caffe2_root/doxygen
os.chdir("..")
os.system("git checkout caffe2/contrib/.")
os.system("git checkout caffe2/distributed/.")
os.system("git checkout caffe2/experiments/.")
os.system("git checkout caffe2/python/.")
for root, dirs, files in os.walk("."):
Reported by Bandit.
Line: 24
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b605_start_process_with_a_shell.html
# move up from /caffe2_root/doxygen
os.chdir("..")
os.system("git checkout caffe2/contrib/.")
os.system("git checkout caffe2/distributed/.")
os.system("git checkout caffe2/experiments/.")
os.system("git checkout caffe2/python/.")
for root, dirs, files in os.walk("."):
Reported by Bandit.
caffe2/python/operator_test/ctc_greedy_decoder_op_test.py
20 issues
Line: 7
Column: 1
from caffe2.python import core
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
import unittest
Reported by Pylint.
Line: 10
Column: 1
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
import unittest
class TestCTCGreedyDecoderOp(serial.SerializedTestCase):
Reported by Pylint.
Line: 27
Column: 42
@settings(deadline=10000)
def test_ctc_greedy_decoder(
self, batch, max_time,
num_classes, merge_repeated, gc, dc
):
def input_generater():
inputs = np.random.rand(max_time, batch, num_classes)\
.astype(np.float32)
Reported by Pylint.
Line: 97
Column: 26
@settings(deadline=10000)
def test_ctc_greedy_decoder_no_merge_arg(
self, batch, max_time,
num_classes, gc, dc
):
def input_generater():
inputs = np.random.rand(max_time, batch, num_classes)\
.astype(np.float32)
Reported by Pylint.
Line: 134
Column: 17
def ref_ctc_decoder_max_time(inputs):
return ref_ctc_decoder_no_merge_arg(inputs, None)
inputs, seq_len = input_generater()
op = core.CreateOperator('CTCGreedyDecoder',
['INPUTS'],
['OUTPUT_LEN', 'VALUES'])
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 12
Column: 1
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
import unittest
class TestCTCGreedyDecoderOp(serial.SerializedTestCase):
@given(
Reported by Pylint.
Line: 15
Column: 1
import unittest
class TestCTCGreedyDecoderOp(serial.SerializedTestCase):
@given(
batch=st.sampled_from([2, 4, 128, 256]),
max_time=st.sampled_from([2, 10, 30, 50]),
num_classes=st.sampled_from([2, 10, 26, 40]),
Reported by Pylint.
Line: 24
Column: 5
merge_repeated=st.sampled_from([True, False]),
**hu.gcs_cpu_only
)
@settings(deadline=10000)
def test_ctc_greedy_decoder(
self, batch, max_time,
num_classes, merge_repeated, gc, dc
):
Reported by Pylint.
Line: 24
Column: 5
merge_repeated=st.sampled_from([True, False]),
**hu.gcs_cpu_only
)
@settings(deadline=10000)
def test_ctc_greedy_decoder(
self, batch, max_time,
num_classes, merge_repeated, gc, dc
):
Reported by Pylint.
caffe2/python/operator_test/math_ops_test.py
20 issues
Line: 7
Column: 1
from caffe2.python import core
from hypothesis import given, settings
from hypothesis import strategies as st
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import numpy as np
Reported by Pylint.
Line: 8
Column: 1
from caffe2.python import core
from hypothesis import given, settings
from hypothesis import strategies as st
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import numpy as np
import unittest
Reported by Pylint.
Line: 21
Column: 55
@given(X=hu.tensor(),
exponent=st.floats(min_value=2.0, max_value=3.0),
**hu.gcs)
def test_elementwise_power(self, X, exponent, gc, dc):
# negative integer raised with non-integer exponent is domain error
X = np.abs(X)
def powf(X):
return (X ** exponent,)
Reported by Pylint.
Line: 27
Column: 30
def powf(X):
return (X ** exponent,)
def powf_grad(g_out, outputs, fwd_inputs):
return (exponent * (fwd_inputs[0] ** (exponent - 1)) * g_out,)
op = core.CreateOperator(
"Pow", ["X"], ["Y"], exponent=exponent)
Reported by Pylint.
Line: 42
Column: 28
exponent=st.floats(min_value=-3.0, max_value=3.0),
**hu.gcs)
@settings(deadline=10000)
def test_sign(self, X, exponent, gc, dc):
def signf(X):
return [np.sign(X)]
op = core.CreateOperator(
"Sign", ["X"], ["Y"])
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core
from hypothesis import given, settings
from hypothesis import strategies as st
import caffe2.python.hypothesis_test_util as hu
Reported by Pylint.
Line: 13
Column: 1
import caffe2.python.serialized_test.serialized_test_util as serial
import numpy as np
import unittest
class TestMathOps(serial.SerializedTestCase):
@given(X=hu.tensor(),
Reported by Pylint.
Line: 16
Column: 1
import unittest
class TestMathOps(serial.SerializedTestCase):
@given(X=hu.tensor(),
exponent=st.floats(min_value=2.0, max_value=3.0),
**hu.gcs)
def test_elementwise_power(self, X, exponent, gc, dc):
Reported by Pylint.
Line: 21
Column: 5
@given(X=hu.tensor(),
exponent=st.floats(min_value=2.0, max_value=3.0),
**hu.gcs)
def test_elementwise_power(self, X, exponent, gc, dc):
# negative integer raised with non-integer exponent is domain error
X = np.abs(X)
def powf(X):
return (X ** exponent,)
Reported by Pylint.
Line: 21
Column: 5
@given(X=hu.tensor(),
exponent=st.floats(min_value=2.0, max_value=3.0),
**hu.gcs)
def test_elementwise_power(self, X, exponent, gc, dc):
# negative integer raised with non-integer exponent is domain error
X = np.abs(X)
def powf(X):
return (X ** exponent,)
Reported by Pylint.
docs/cpp/source/conf.py
20 issues
Line: 122
Column: 1
# General information about the project.
project = 'PyTorch'
copyright = '2019, Torch Contributors'
author = 'Torch Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
Reported by Pylint.
Line: 130
Column: 3
# built documents.
#
# The short X.Y version.
# TODO: change to [:2] at v1.0
version = 'master'
# The full version, including alpha/beta/rc tags.
# TODO: verify this works as expected
release = 'master'
Reported by Pylint.
Line: 133
Column: 3
# TODO: change to [:2] at v1.0
version = 'master'
# The full version, including alpha/beta/rc tags.
# TODO: verify this works as expected
release = 'master'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
#
# PyTorch documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 23 13:31:47 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
Reported by Pylint.
Line: 28
Column: 1
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = '3.1.2'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
Reported by Pylint.
Line: 61
Column: 1
)
breathe_projects = {"PyTorch": doxygen_xml_dir}
breathe_default_project = "PyTorch"
# Setup the exhale extension
exhale_args = {
############################################################################
# These arguments are required. #
Reported by Pylint.
Line: 103
Column: 1
}
# Tell sphinx what the primary language being documented is.
primary_domain = 'cpp'
# Tell sphinx what the pygments highlight language should be.
highlight_language = 'cpp'
# Add any paths that contain templates here, relative to this directory.
Reported by Pylint.
Line: 106
Column: 1
primary_domain = 'cpp'
# Tell sphinx what the pygments highlight language should be.
highlight_language = 'cpp'
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix(es) of source filenames.
Reported by Pylint.
Line: 115
Column: 1
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
Reported by Pylint.
Line: 118
Column: 1
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PyTorch'
copyright = '2019, Torch Contributors'
author = 'Torch Contributors'
Reported by Pylint.