The following issues were found:
torch/types.py
26 issues
Line: 19
Column: 10
_float = builtins.float
_bool = builtins.bool
_dtype = torch.dtype
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout
Reported by Pylint.
Line: 20
Column: 11
_bool = builtins.bool
_dtype = torch.dtype
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout
# Meta-type for "numeric" things; matches our docs
Reported by Pylint.
Line: 21
Column: 12
_dtype = torch.dtype
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout
# Meta-type for "numeric" things; matches our docs
Number = Union[builtins.int, builtins.float, builtins.bool]
Reported by Pylint.
Line: 22
Column: 15
_dtype = torch.dtype
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout
# Meta-type for "numeric" things; matches our docs
Number = Union[builtins.int, builtins.float, builtins.bool]
Reported by Pylint.
Line: 23
Column: 11
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout
# Meta-type for "numeric" things; matches our docs
Number = Union[builtins.int, builtins.float, builtins.bool]
# Meta-type for "device-like" things. Not to be confused with 'device' (a
Reported by Pylint.
Line: 41
Column: 27
def __deepcopy__(self, memo) -> 'Storage':
...
def _new_shared(self, int) -> 'Storage':
...
def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
...
Reported by Pylint.
Line: 41
Column: 27
def __deepcopy__(self, memo) -> 'Storage':
...
def _new_shared(self, int) -> 'Storage':
...
def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
...
Reported by Pylint.
Line: 44
Column: 56
def _new_shared(self, int) -> 'Storage':
...
def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
...
def element_size(self) -> int:
...
Reported by Pylint.
Line: 44
Column: 35
def _new_shared(self, int) -> 'Storage':
...
def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
...
def element_size(self) -> int:
...
Reported by Pylint.
Line: 44
Column: 27
def _new_shared(self, int) -> 'Storage':
...
def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
...
def element_size(self) -> int:
...
Reported by Pylint.
test/test_modules.py
26 issues
Line: 1
Column: 1
import torch
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
Reported by Pylint.
Line: 2
Column: 1
import torch
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
Reported by Pylint.
Line: 3
Column: 1
import torch
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
Reported by Pylint.
Line: 4
Column: 1
import torch
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
Reported by Pylint.
Line: 35
Column: 3
outputs = m(*args, **kwargs)
# === Compare outputs to a reference if one is specified. ===
# TODO: Handle precision
reference_fn = module_input.reference_fn
if reference_fn is not None:
ref_outputs = reference_fn(m, *args, **kwargs)
self.assertEqual(outputs, ref_outputs)
Reported by Pylint.
Line: 1
Column: 1
import torch
from torch.testing._internal.common_device_type import instantiate_device_type_tests
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
Reported by Pylint.
Line: 6
Column: 1
from torch.testing._internal.common_modules import module_db, modules
from torch.testing._internal.common_utils import (
TestCase, run_tests, freeze_rng_state, mock_wrapper, get_tensors_from)
from unittest.mock import patch
class TestModule(TestCase):
_do_cuda_memory_leak_check = True
_do_cuda_non_default_stream = True
Reported by Pylint.
Line: 9
Column: 1
from unittest.mock import patch
class TestModule(TestCase):
_do_cuda_memory_leak_check = True
_do_cuda_non_default_stream = True
precision = 1e-5
rel_tol = 1e-5
Reported by Pylint.
Line: 16
Column: 5
rel_tol = 1e-5
@modules(module_db)
def test_forward(self, device, dtype, module_info):
module_cls = module_info.module_cls
module_inputs = module_info.module_inputs_func(module_info, device=device, dtype=dtype,
requires_grad=False)
for module_input in module_inputs:
if module_input.forward_input is None:
Reported by Pylint.
Line: 26
Column: 1
with freeze_rng_state():
# === Instantiate the module. ===
args, kwargs = module_input.constructor_input.args, module_input.constructor_input.kwargs
m = module_cls(*args, **kwargs)
m.to(device).to(dtype)
# === Do forward pass. ===
args, kwargs = module_input.forward_input.args, module_input.forward_input.kwargs
Reported by Pylint.
modules/detectron/upsample_nearest_op_test.py
26 issues
Line: 4
Column: 1
import unittest
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep
from hypothesis import given, settings
Reported by Pylint.
Line: 5
Column: 1
import unittest
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep
from hypothesis import given, settings
Reported by Pylint.
Line: 7
Column: 1
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep
from hypothesis import given, settings
dyndep.InitOpsLibrary("@/caffe2/modules/detectron:detectron_ops")
Reported by Pylint.
Line: 8
Column: 1
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep
from hypothesis import given, settings
dyndep.InitOpsLibrary("@/caffe2/modules/detectron:detectron_ops")
Reported by Pylint.
Line: 23
Column: 60
**hu.gcs
)
@settings(deadline=None, max_examples=20)
def test_upsample_nearest_op(self, N, H, W, scale, gc, dc):
C = 32
X = np.random.randn(N, C, H, W).astype(np.float32)
op = core.CreateOperator("UpsampleNearest", ["X"], ["Y"], scale=scale)
def ref(X):
Reported by Pylint.
Line: 1
Column: 1
import unittest
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep
from hypothesis import given, settings
Reported by Pylint.
Line: 14
Column: 1
dyndep.InitOpsLibrary("@/caffe2/modules/detectron:detectron_ops")
class TestUpsampleNearestOp(hu.HypothesisTestCase):
@given(
N=st.integers(1, 3),
H=st.integers(10, 300),
W=st.integers(10, 300),
scale=st.integers(1, 3),
Reported by Pylint.
Line: 14
Column: 1
dyndep.InitOpsLibrary("@/caffe2/modules/detectron:detectron_ops")
class TestUpsampleNearestOp(hu.HypothesisTestCase):
@given(
N=st.integers(1, 3),
H=st.integers(10, 300),
W=st.integers(10, 300),
scale=st.integers(1, 3),
Reported by Pylint.
Line: 22
Column: 5
scale=st.integers(1, 3),
**hu.gcs
)
@settings(deadline=None, max_examples=20)
def test_upsample_nearest_op(self, N, H, W, scale, gc, dc):
C = 32
X = np.random.randn(N, C, H, W).astype(np.float32)
op = core.CreateOperator("UpsampleNearest", ["X"], ["Y"], scale=scale)
Reported by Pylint.
Line: 22
Column: 5
scale=st.integers(1, 3),
**hu.gcs
)
@settings(deadline=None, max_examples=20)
def test_upsample_nearest_op(self, N, H, W, scale, gc, dc):
C = 32
X = np.random.randn(N, C, H, W).astype(np.float32)
op = core.CreateOperator("UpsampleNearest", ["X"], ["Y"], scale=scale)
Reported by Pylint.
tools/code_coverage/package/util/utils.py
26 issues
Line: 7
Column: 1
import time
from typing import Any, NoReturn, Optional
from .setting import (
LOG_DIR,
PROFILE_DIR,
CompilerType,
TestList,
TestPlatform,
Reported by Pylint.
Line: 96
Column: 9
cov_type = detect_compiler_type() # type: ignore[call-arg]
else:
from caffe2.fb.code_coverage.tool.package.fbcode.utils import ( # type: ignore[import]
detect_compiler_type,
)
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 92
Column: 9
def detect_compiler_type(platform: TestPlatform) -> CompilerType:
if platform == TestPlatform.OSS:
from package.oss.utils import detect_compiler_type # type: ignore[misc]
cov_type = detect_compiler_type() # type: ignore[call-arg]
else:
from caffe2.fb.code_coverage.tool.package.fbcode.utils import ( # type: ignore[import]
detect_compiler_type,
Reported by Pylint.
Line: 1
Column: 1
import os
import shutil
import sys
import time
from typing import Any, NoReturn, Optional
from .setting import (
LOG_DIR,
PROFILE_DIR,
Reported by Pylint.
Line: 17
Column: 1
)
def convert_time(seconds: float) -> str:
seconds = int(round(seconds))
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
Reported by Pylint.
Line: 28
Column: 1
return "%d:%02d:%02d" % (hour, minutes, seconds)
def print_time(message: str, start_time: float, summary_time: bool = False) -> None:
with open(os.path.join(LOG_DIR, "log.txt"), "a+") as log_file:
end_time = time.time()
print(message, convert_time(end_time - start_time), file=log_file)
if summary_time:
print("\n", file=log_file)
Reported by Pylint.
Line: 36
Column: 1
print("\n", file=log_file)
def print_log(*args: Any) -> None:
with open(os.path.join(LOG_DIR, "log.txt"), "a+") as log_file:
print(f"[LOG] {' '.join(args)}", file=log_file)
def print_error(*args: Any) -> None:
Reported by Pylint.
Line: 41
Column: 1
print(f"[LOG] {' '.join(args)}", file=log_file)
def print_error(*args: Any) -> None:
with open(os.path.join(LOG_DIR, "log.txt"), "a+") as log_file:
print(f"[ERROR] {' '.join(args)}", file=log_file)
def remove_file(path: str) -> None:
Reported by Pylint.
Line: 46
Column: 1
print(f"[ERROR] {' '.join(args)}", file=log_file)
def remove_file(path: str) -> None:
if os.path.exists(path):
os.remove(path)
def remove_folder(path: str) -> None:
Reported by Pylint.
Line: 51
Column: 1
os.remove(path)
def remove_folder(path: str) -> None:
shutil.rmtree(path)
def create_folder(*paths: Any) -> None:
for path in paths:
Reported by Pylint.
torch/nn/common_types.py
26 issues
Line: 2
Column: 1
from typing import TypeVar, Union, Tuple, Optional
from .. import Tensor
# Create some useful type aliases
# Template for arguments which can be supplied as a tuple, or which can be a scalar which PyTorch will internally
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
Reported by Pylint.
Line: 1
Column: 1
from typing import TypeVar, Union, Tuple, Optional
from .. import Tensor
# Create some useful type aliases
# Template for arguments which can be supplied as a tuple, or which can be a scalar which PyTorch will internally
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
Reported by Pylint.
Line: 6
Column: 1
# Create some useful type aliases
# Template for arguments which can be supplied as a tuple, or which can be a scalar which PyTorch will internally
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
Reported by Pylint.
Line: 8
Column: 1
# Template for arguments which can be supplied as a tuple, or which can be a scalar which PyTorch will internally
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
Reported by Pylint.
Line: 9
Column: 1
# Template for arguments which can be supplied as a tuple, or which can be a scalar which PyTorch will internally
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
Reported by Pylint.
Line: 10
Column: 1
# broadcast to a tuple.
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
_scalar_or_tuple_5_t = Union[T, Tuple[T, T, T, T, T]]
Reported by Pylint.
Line: 11
Column: 1
# Comes in several variants: A tuple of unknown size, and a fixed-size tuple for 1d, 2d, or 3d operations.
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
_scalar_or_tuple_5_t = Union[T, Tuple[T, T, T, T, T]]
_scalar_or_tuple_6_t = Union[T, Tuple[T, T, T, T, T, T]]
Reported by Pylint.
Line: 12
Column: 1
T = TypeVar('T')
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
_scalar_or_tuple_5_t = Union[T, Tuple[T, T, T, T, T]]
_scalar_or_tuple_6_t = Union[T, Tuple[T, T, T, T, T, T]]
Reported by Pylint.
Line: 13
Column: 1
_scalar_or_tuple_any_t = Union[T, Tuple[T, ...]]
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
_scalar_or_tuple_5_t = Union[T, Tuple[T, T, T, T, T]]
_scalar_or_tuple_6_t = Union[T, Tuple[T, T, T, T, T, T]]
# For arguments which represent size parameters (eg, kernel size, padding)
Reported by Pylint.
Line: 14
Column: 1
_scalar_or_tuple_1_t = Union[T, Tuple[T]]
_scalar_or_tuple_2_t = Union[T, Tuple[T, T]]
_scalar_or_tuple_3_t = Union[T, Tuple[T, T, T]]
_scalar_or_tuple_4_t = Union[T, Tuple[T, T, T, T]]
_scalar_or_tuple_5_t = Union[T, Tuple[T, T, T, T, T]]
_scalar_or_tuple_6_t = Union[T, Tuple[T, T, T, T, T, T]]
# For arguments which represent size parameters (eg, kernel size, padding)
_size_any_t = _scalar_or_tuple_any_t[int]
Reported by Pylint.
caffe2/python/operator_test/bisect_percentile_op_test.py
26 issues
Line: 6
Column: 1
import hypothesis.strategies as st
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
Reported by Pylint.
Line: 9
Column: 1
import hypothesis.strategies as st
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import bisect
import numpy as np
Reported by Pylint.
Line: 113
Column: 9
pct_upper = np.array([0.1, 0.8, 1.0, 0.4, 1.0], dtype=np.float32)
pct_mapping = np.array([0.1, 0.5, 0.95, 0.25, 0.75], dtype=np.float32)
lengths = np.array([3, 2], dtype=np.int32)
self.compare_reference(
raw_data, pct_raw_data, pct_mapping, pct_lower, pct_upper, lengths)
@given(
N=st.integers(min_value=20, max_value=100),
lengths=st.lists(
Reported by Pylint.
Line: 129
Column: 55
**hu.gcs_cpu_only
)
def test_bisect_percentil_op_large(
self, N, lengths, max_value, discrete, p, gc, dc
):
lengths = np.array(lengths, dtype=np.int32)
D = len(lengths)
if discrete:
Reported by Pylint.
Line: 129
Column: 51
**hu.gcs_cpu_only
)
def test_bisect_percentil_op_large(
self, N, lengths, max_value, discrete, p, gc, dc
):
lengths = np.array(lengths, dtype=np.int32)
D = len(lengths)
if discrete:
Reported by Pylint.
Line: 176
Column: 9
raw_data = np.array(raw_data, dtype=np.float32)
pct_raw_data = np.array(pct_raw_data, dtype=np.float32)
self.compare_reference(
raw_data, pct_raw_data, pct_mapping, pct_lower, pct_upper, lengths)
if __name__ == "__main__":
import unittest
Reported by Pylint.
Line: 1
Column: 1
import hypothesis.strategies as st
from caffe2.python import core, workspace
from hypothesis import given
Reported by Pylint.
Line: 12
Column: 1
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import bisect
import numpy as np
class TestBisectPercentileOp(hu.HypothesisTestCase):
def compare_reference(
Reported by Pylint.
Line: 16
Column: 1
import numpy as np
class TestBisectPercentileOp(hu.HypothesisTestCase):
def compare_reference(
self,
raw_data,
pct_raw_data,
pct_mapping,
Reported by Pylint.
Line: 17
Column: 5
class TestBisectPercentileOp(hu.HypothesisTestCase):
def compare_reference(
self,
raw_data,
pct_raw_data,
pct_mapping,
pct_upper,
Reported by Pylint.
caffe2/python/modeling/parameter_sharing_test.py
26 issues
Line: 22
Column: 9
def test_parameter_sharing_default_scopes(self):
# Test no sharing default scopes
param_1 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_1, 'w')
with scope.NameScope('scope'):
param_2 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_2, 'scope/w')
with scope.NameScope('scope_2'):
param_3 = parameter_sharing_context.get_parameter_name('w')
Reported by Pylint.
Line: 25
Column: 13
self.assertEquals(param_1, 'w')
with scope.NameScope('scope'):
param_2 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_2, 'scope/w')
with scope.NameScope('scope_2'):
param_3 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_3, 'scope/scope_2/w')
def test_parameter_sharing_nested_scopes(self):
Reported by Pylint.
Line: 28
Column: 17
self.assertEquals(param_2, 'scope/w')
with scope.NameScope('scope_2'):
param_3 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_3, 'scope/scope_2/w')
def test_parameter_sharing_nested_scopes(self):
# Test parameter sharing
with scope.NameScope('global_scope'):
with ParameterSharing({'model_b': 'model_a'}):
Reported by Pylint.
Line: 35
Column: 17
with scope.NameScope('global_scope'):
with ParameterSharing({'model_b': 'model_a'}):
param_global = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_global, 'global_scope/w')
# This scope is overridden to match 'model_a'
with scope.NameScope('model_b'):
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
Reported by Pylint.
Line: 41
Column: 25
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
self.assertEquals(param_4, 'global_scope/model_a/w')
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_a/w')
# This scope is supposed to have not sharing
Reported by Pylint.
Line: 45
Column: 29
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_a/w')
# This scope is supposed to have not sharing
with scope.NameScope('model_c'):
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
Reported by Pylint.
Line: 51
Column: 25
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
self.assertEquals(param_4, 'global_scope/model_c/w')
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_c/w')
Reported by Pylint.
Line: 55
Column: 29
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_c/w')
def test_parameter_sharing_subscopes(self):
# Sharing only one of the subscopes
with ParameterSharing({'global_scope/b': 'global_scope/a'}):
with scope.NameScope('global_scope'):
Reported by Pylint.
Line: 62
Column: 17
with ParameterSharing({'global_scope/b': 'global_scope/a'}):
with scope.NameScope('global_scope'):
param_6 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_6, 'global_scope/w')
with scope.NameScope('a'):
param_7 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_7, 'global_scope/a/w')
with scope.NameScope('b'):
param_8 = parameter_sharing_context.get_parameter_name('w')
Reported by Pylint.
Line: 65
Column: 21
self.assertEquals(param_6, 'global_scope/w')
with scope.NameScope('a'):
param_7 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_7, 'global_scope/a/w')
with scope.NameScope('b'):
param_8 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_8, 'global_scope/a/w')
with scope.NameScope('c'):
param_9 = parameter_sharing_context.get_parameter_name('w')
Reported by Pylint.
caffe2/python/operator_test/rebatching_queue_test.py
25 issues
Line: 11
Column: 1
import numpy as np
import numpy.testing as npt
from hypothesis import given, settings
import hypothesis.strategies as st
import functools
Reported by Pylint.
Line: 12
Column: 1
import numpy.testing as npt
from hypothesis import given, settings
import hypothesis.strategies as st
import functools
def primefac(n):
Reported by Pylint.
Line: 54
Column: 13
workspace.RunNetOnce(net)
for idx in range(3):
self.assertEquals(workspace.FetchBlob(results[idx]), [1.0])
def test_rebatching_queue_multi_enqueue_dequeue(self):
net = core.Net('net')
workspace.FeedBlob(
"tensors", np.array([x for x in range(10)], np.int32)
Reported by Pylint.
Line: 220
Column: 25
### Consumers ###
outputs = []
def append(ins, outs):
# Extend is atomic
outputs.extend(ins[0].data.tolist())
consumer_steps = []
for i in range(num_consumers):
Reported by Pylint.
Line: 283
Column: 9
# We check that the outputs are a permutation of inputs
inputs.sort()
outputs.sort()
self.assertEquals(inputs, outputs)
if __name__ == "__main__":
import unittest
unittest.main()
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase
import numpy as np
import numpy.testing as npt
Reported by Pylint.
Line: 14
Column: 1
from hypothesis import given, settings
import hypothesis.strategies as st
import functools
def primefac(n):
ret = []
divisor = 2
Reported by Pylint.
Line: 17
Column: 1
import functools
def primefac(n):
ret = []
divisor = 2
while divisor * divisor <= n:
while (n % divisor) == 0:
ret.append(divisor)
Reported by Pylint.
Line: 17
Column: 1
import functools
def primefac(n):
ret = []
divisor = 2
while divisor * divisor <= n:
while (n % divisor) == 0:
ret.append(divisor)
Reported by Pylint.
Line: 30
Column: 1
return ret
class TestReBatchingQueue(TestCase):
def test_rebatching_queue_single_enqueue_dequeue(self):
net = core.Net('net')
tensors = [
net.ConstantFill([], 1, value=1.0, run_once=False)
Reported by Pylint.
caffe2/python/recurrent.py
25 issues
Line: 106
Column: 9
# also add to the output list the intermediate outputs of fwd_step that
# are used by backward.
ssa, blob_versions = core.get_ssa(cell_net.Proto())
scratches = [
blob
for blob, ver in viewitems(blob_versions)
if (ver > 0 and
blob in undefined and
Reported by Pylint.
Line: 230
Column: 5
[output_blob],
)
def map_to_dual_list(m):
return [str(x) for x in list(m.keys())] + \
[str(x) for x in list(m.values())]
backward_args = {}
if backward_cell_net is not None:
Reported by Pylint.
Line: 1
Column: 1
## @package recurrent
# Module caffe2.python.recurrent
from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys
def recurrent_net(
net, cell_net, inputs, initial_cell_inputs,
links, timestep=None, scope=None, outputs_with_grads=(0,),
recompute_blobs_on_backward=None, forward_only=False,
):
'''
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys
def recurrent_net(
net, cell_net, inputs, initial_cell_inputs,
links, timestep=None, scope=None, outputs_with_grads=(0,),
recompute_blobs_on_backward=None, forward_only=False,
):
'''
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys
def recurrent_net(
net, cell_net, inputs, initial_cell_inputs,
links, timestep=None, scope=None, outputs_with_grads=(0,),
recompute_blobs_on_backward=None, forward_only=False,
):
'''
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core, workspace
from future.utils import viewitems, viewkeys
def recurrent_net(
net, cell_net, inputs, initial_cell_inputs,
links, timestep=None, scope=None, outputs_with_grads=(0,),
recompute_blobs_on_backward=None, forward_only=False,
):
'''
Reported by Pylint.
Line: 49
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
forward_only: if True, only forward steps are executed
'''
assert len(inputs) == 1, "Only one input blob is supported so far"
input_blobs = [str(i[0]) for i in inputs]
initial_input_blobs = [str(x[1]) for x in initial_cell_inputs]
op_name = net.NextName('recurrent')
Reported by Bandit.
Line: 55
Column: 5
initial_input_blobs = [str(x[1]) for x in initial_cell_inputs]
op_name = net.NextName('recurrent')
def s(name):
# We have to manually scope due to our internal/external blob
# relationships.
scope_name = op_name if scope is None else scope
return "{}/{}".format(str(scope_name), str(name))
Reported by Pylint.
Line: 91
Column: 17
recompute_blobs_on_backward = {str(b) for b in
recompute_blobs_on_backward}
for op in cell_net.Proto().op:
if not recompute_blobs_on_backward.isdisjoint(set(op.output)):
backward_cell_net.Proto().op.extend([op])
# This fires if other outputs than the declared
# are computed by the ops that are recomputed
assert set(op.output).issubset(recompute_blobs_on_backward)
Reported by Pylint.
caffe2/python/operator_test/pack_rnn_sequence_op_test.py
25 issues
Line: 9
Column: 1
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
import numpy as np
class TestPackRNNSequenceOperator(serial.SerializedTestCase):
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
import hypothesis.strategies as st
Reported by Pylint.
Line: 13
Column: 1
import numpy as np
class TestPackRNNSequenceOperator(serial.SerializedTestCase):
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
Reported by Pylint.
Line: 17
Column: 5
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
Reported by Pylint.
Line: 17
Column: 5
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
Reported by Pylint.
Line: 17
Column: 5
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
Reported by Pylint.
Line: 17
Column: 5
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
Reported by Pylint.
Line: 17
Column: 5
@serial.given(n=st.integers(0, 10), k=st.integers(1, 5),
dim=st.integers(1, 5), **hu.gcs_cpu_only)
def test_pack_rnn_seqence(self, n, k, dim, gc, dc):
lengths = np.random.randint(k, size=n).astype(np.int32) + 1
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
Reported by Pylint.
Line: 22
Column: 13
values = np.random.rand(sum(lengths), dim).astype(np.float32)
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
N = lengths.size
output = np.zeros((T, N) + values.shape[1:]).astype(np.float32)
offset = 0
for c in range(N):
for r in range(lengths[c]):
Reported by Pylint.
Line: 23
Column: 13
def pack_op(values, lengths):
T = max(lengths) if any(lengths) else 0
N = lengths.size
output = np.zeros((T, N) + values.shape[1:]).astype(np.float32)
offset = 0
for c in range(N):
for r in range(lengths[c]):
output[r][c] = values[offset + r]
Reported by Pylint.