The following issues were found:
caffe2/python/operator_test/image_input_op_test.py
60 issues
Line: 25
Column: 1
# version seem to differ slightly. It does test
# most other features
from hypothesis import given, settings, Verbosity
import hypothesis.strategies as st
from caffe2.proto import caffe2_pb2
import caffe2.python.hypothesis_test_util as hu
Reported by Pylint.
Line: 26
Column: 1
# most other features
from hypothesis import given, settings, Verbosity
import hypothesis.strategies as st
from caffe2.proto import caffe2_pb2
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import workspace, core
Reported by Pylint.
Line: 37
Column: 5
# Verification routines (applies transformations to image to
# verify if the operator produces same result)
def verify_apply_bounding_box(img, box):
import skimage.util
if any(type(box[f]) is not int or np.isnan(box[f] or box[f] < 0)
for f in range(0, 4)):
return img
# Box is ymin, xmin, bound_height, bound_width
y_bounds = (box[0], img.shape[0] - box[0] - box[2])
Reported by Pylint.
Line: 78
Column: 5
def verify_crop(img, crop):
import skimage.util
assert img.shape[0] >= crop
assert img.shape[1] >= crop
y_offset = 0
if img.shape[0] > crop:
y_offset = (img.shape[0] - crop) // 2
Reported by Pylint.
Line: 20
Column: 3
import sys
import tempfile
# TODO: This test does not test scaling because
# the algorithms used by OpenCV in the C and Python
# version seem to differ slightly. It does test
# most other features
from hypothesis import given, settings, Verbosity
Reported by Pylint.
Line: 142
Column: 3
# Create a random bounding box for every other image
# ymin, xmin, bound_height, bound_width
# TODO: To ensure that we never need to scale, we
# ensure that the bounding-box is larger than the
# minsize parameter
bounding_box = list(default_bound)
do_default_bound = True
if index % 2 == 0:
Reported by Pylint.
Line: 241
Column: 3
def run_test(
size_tuple, means, stds, label_type, num_labels, is_test, scale_jitter_type,
color_jitter, color_lighting, dc, validator, output1=None, output2_size=None):
# TODO: Does not test on GPU and does not test use_gpu_transform
# WARNING: Using ModelHelper automatically does NHWC to NCHW
# transformation if needed.
width, height, minsize, crop = size_tuple
means = [float(m) for m in means]
stds = [float(s) for s in stds]
Reported by Pylint.
Line: 323
Column: 22
class TestImport(hu.HypothesisTestCase):
def validate_image_and_label(
self, expected_images, device_option, count_images, label_type,
is_test, scale_jitter_type, color_jitter, color_lighting):
l = workspace.FetchBlob('label')
result = workspace.FetchBlob('data').astype(np.int32)
# If we don't use_gpu_transform, the output is in NHWC
# Our reference output is CHW so we swap
if device_option.device_type != 1:
Reported by Pylint.
Line: 323
Column: 41
class TestImport(hu.HypothesisTestCase):
def validate_image_and_label(
self, expected_images, device_option, count_images, label_type,
is_test, scale_jitter_type, color_jitter, color_lighting):
l = workspace.FetchBlob('label')
result = workspace.FetchBlob('data').astype(np.int32)
# If we don't use_gpu_transform, the output is in NHWC
# Our reference output is CHW so we swap
if device_option.device_type != 1:
Reported by Pylint.
Line: 323
Column: 55
class TestImport(hu.HypothesisTestCase):
def validate_image_and_label(
self, expected_images, device_option, count_images, label_type,
is_test, scale_jitter_type, color_jitter, color_lighting):
l = workspace.FetchBlob('label')
result = workspace.FetchBlob('data').astype(np.int32)
# If we don't use_gpu_transform, the output is in NHWC
# Our reference output is CHW so we swap
if device_option.device_type != 1:
Reported by Pylint.
caffe2/python/operator_test/leaky_relu_test.py
60 issues
Line: 6
Column: 1
import numpy as np
from hypothesis import given, assume
import hypothesis.strategies as st
from caffe2.python import core, model_helper, utils
import caffe2.python.hypothesis_test_util as hu
Reported by Pylint.
Line: 7
Column: 1
import numpy as np
from hypothesis import given, assume
import hypothesis.strategies as st
from caffe2.python import core, model_helper, utils
import caffe2.python.hypothesis_test_util as hu
Reported by Pylint.
Line: 29
Column: 45
return input_data,
def _get_op(self, device_option, alpha, order, inplace=False):
outputs = ['output' if not inplace else "input"]
op = core.CreateOperator(
'LeakyRelu',
['input'],
outputs,
Reported by Pylint.
Line: 73
Column: 42
W=st.integers(7, 10),
alpha=st.floats(0, 1),
seed=st.integers(0, 1000))
def test_leaky_relu_layout(self, gc, dc, N, C, H, W, alpha, seed):
outputs = {}
for order in ('NCHW', 'NHWC'):
np.random.seed(seed)
input_blobs = self._get_inputs(N, C, H, W, order)
self._feed_inputs(input_blobs, device_option=gc)
Reported by Pylint.
Line: 101
Column: 51
alpha=st.floats(0, 1),
seed=st.integers(0, 1000),
inplace=st.booleans())
def test_leaky_relu_reference_check(self, gc, dc, N, C, H, W, order, alpha,
seed, inplace):
np.random.seed(seed)
if order != "NCHW":
assume(not inplace)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
from hypothesis import given, assume
import hypothesis.strategies as st
from caffe2.python import core, model_helper, utils
Reported by Pylint.
Line: 13
Column: 1
import caffe2.python.hypothesis_test_util as hu
class TestLeakyRelu(hu.HypothesisTestCase):
def _get_inputs(self, N, C, H, W, order):
input_data = np.random.rand(N, C, H, W).astype(np.float32) - 0.5
# default step size is 0.05
Reported by Pylint.
Line: 15
Column: 5
class TestLeakyRelu(hu.HypothesisTestCase):
def _get_inputs(self, N, C, H, W, order):
input_data = np.random.rand(N, C, H, W).astype(np.float32) - 0.5
# default step size is 0.05
input_data[np.logical_and(
input_data >= 0, input_data <= 0.051)] = 0.051
Reported by Pylint.
Line: 15
Column: 5
class TestLeakyRelu(hu.HypothesisTestCase):
def _get_inputs(self, N, C, H, W, order):
input_data = np.random.rand(N, C, H, W).astype(np.float32) - 0.5
# default step size is 0.05
input_data[np.logical_and(
input_data >= 0, input_data <= 0.051)] = 0.051
Reported by Pylint.
Line: 15
Column: 5
class TestLeakyRelu(hu.HypothesisTestCase):
def _get_inputs(self, N, C, H, W, order):
input_data = np.random.rand(N, C, H, W).astype(np.float32) - 0.5
# default step size is 0.05
input_data[np.logical_and(
input_data >= 0, input_data <= 0.051)] = 0.051
Reported by Pylint.
test/package/test_save_load.py
60 issues
Line: 6
Column: 1
from textwrap import dedent
from unittest import skipIf
from torch.package import PackageExporter, PackageImporter, sys_importer
from torch.testing._internal.common_utils import run_tests, IS_FBCODE, IS_SANDCASTLE
try:
from .common import PackageTestCase
except ImportError:
Reported by Pylint.
Line: 7
Column: 1
from unittest import skipIf
from torch.package import PackageExporter, PackageImporter, sys_importer
from torch.testing._internal.common_utils import run_tests, IS_FBCODE, IS_SANDCASTLE
try:
from .common import PackageTestCase
except ImportError:
# Support the case where we run this file directly.
Reported by Pylint.
Line: 67
Column: 13
def test_save_module(self):
filename = self.temp()
with PackageExporter(filename) as he:
import module_a
import package_a
he.save_module(module_a.__name__)
he.save_module(package_a.__name__)
hi = PackageImporter(filename)
Reported by Pylint.
Line: 68
Column: 13
filename = self.temp()
with PackageExporter(filename) as he:
import module_a
import package_a
he.save_module(module_a.__name__)
he.save_module(package_a.__name__)
hi = PackageImporter(filename)
module_a_i = hi.import_module("module_a")
Reported by Pylint.
Line: 83
Column: 13
def test_dunder_imports(self):
buffer = BytesIO()
with PackageExporter(buffer) as he:
import package_b
obj = package_b.PackageBObject
he.intern("**")
he.save_pickle("res", "obj.pkl", obj)
Reported by Pylint.
Line: 122
Column: 13
def test_save_module_binary(self):
f = BytesIO()
with PackageExporter(f) as he:
import module_a
import package_a
he.save_module(module_a.__name__)
he.save_module(package_a.__name__)
f.seek(0)
Reported by Pylint.
Line: 123
Column: 13
f = BytesIO()
with PackageExporter(f) as he:
import module_a
import package_a
he.save_module(module_a.__name__)
he.save_module(package_a.__name__)
f.seek(0)
hi = PackageImporter(f)
Reported by Pylint.
Line: 141
Column: 9
"Tests that use temporary files are disabled in fbcode",
)
def test_pickle(self):
import package_a.subpackage
obj = package_a.subpackage.PackageASubpackageObject()
obj2 = package_a.PackageAObject(obj)
filename = self.temp()
Reported by Pylint.
Line: 173
Column: 9
"""
Directly saving/requiring an PackageImported module should raise a specific error message.
"""
import package_a.subpackage
obj = package_a.subpackage.PackageASubpackageObject()
obj2 = package_a.PackageAObject(obj)
f1 = self.temp()
with PackageExporter(f1) as pe:
Reported by Pylint.
Line: 200
Column: 9
packages, the user should get an error if they try to re-save the
object with the wrong package's source code.
"""
import package_a.subpackage
obj = package_a.subpackage.PackageASubpackageObject()
obj2 = package_a.PackageAObject(obj)
f1 = self.temp()
with PackageExporter(f1) as pe:
Reported by Pylint.
tools/codegen/api/cpp.py
60 issues
Line: 31
Column: 5
# collisions, but functions are fair game to collide
def name(func: FunctionSchema, *, faithful_name_for_out_overloads: bool = False) -> str:
name = str(func.name.name)
if func.is_out_fn():
if faithful_name_for_out_overloads:
name += '_outf'
else:
name += '_out'
Reported by Pylint.
Line: 83
Column: 3
elif isinstance(t, OptionalType):
if str(t.elem) == 'Tensor':
if mutable and not local.use_const_ref_for_mutable_tensors():
return NamedCType(binds, MutRefCType(BaseCType(tensorT))) # TODO: fix this discrepancy
else:
return NamedCType(binds, ConstRefCType(OptionalCType(BaseCType(tensorT))))
elif str(t.elem) == 'Scalar':
return NamedCType(binds, ConstRefCType(OptionalCType(BaseCType(scalarT))))
elem = argumenttype_type(t.elem, mutable=mutable, binds=binds)
Reported by Pylint.
Line: 91
Column: 3
elem = argumenttype_type(t.elem, mutable=mutable, binds=binds)
return NamedCType(binds, OptionalCType(elem.type))
elif isinstance(t, ListType):
# TODO: remove these special cases, ArrayRef fallthrough works fine
if str(t.elem) == 'int':
return NamedCType(binds, BaseCType(intArrayRefT))
elif str(t.elem) == 'Tensor':
return NamedCType(binds, BaseCType(tensorListT))
elif str(t.elem) == 'Scalar':
Reported by Pylint.
Line: 162
Column: 3
for i, r in enumerate(f.func.returns):
# If we have an inplace function, the return argument is
# implicitly named self.
# TODO: Consider incorporating this into the data model
if f.func.name.name.inplace:
assert i == 0, "illegal inplace function with multiple returns"
name = 'self'
# If we are out function, the name is the name of the
# corresponding output function (r.name will get recorded
Reported by Pylint.
Line: 165
Column: 13
# TODO: Consider incorporating this into the data model
if f.func.name.name.inplace:
assert i == 0, "illegal inplace function with multiple returns"
name = 'self'
# If we are out function, the name is the name of the
# corresponding output function (r.name will get recorded
# in field_name later.)
elif f.func.is_out_fn():
name = f.func.arguments.out[i].name
Reported by Pylint.
Line: 274
Column: 3
if all(x.default == "None" for x in a.all()):
default = '{}'
elif a.dtype.default == "long":
default = 'at::kLong' # TODO: this is wrong
return [Binding(
nctype=NamedCType('options', BaseCType(tensorOptionsT)),
name='options',
default=default,
argument=a,
Reported by Pylint.
Line: 291
Column: 5
assert_never(a)
def arguments(
arguments: Arguments,
*, faithful: bool, method: bool, cpp_no_default_args: Set[str]
) -> List[Binding]:
args: List[Union[Argument, TensorOptionsArguments, SelfArgument]] = []
if faithful:
args.extend(arguments.non_out)
Reported by Pylint.
Line: 1
Column: 1
from tools.codegen.model import (Argument, Arguments, BaseTy, BaseType,
FunctionSchema, ListType, NativeFunction,
OptionalType, Return, SelfArgument,
TensorOptionsArguments, Type, assert_never)
from tools.codegen.api.types import (ArgName, BaseCType, Binding, ConstRefCType, NamedCType, CType,
MutRefCType, ArrayCType, ListCType, VectorCType, ArrayRefCType,
OptionalCType, TupleCType, SpecialArgName, boolT, scalarT,
tensorListT, dimnameListT, tensorT, voidT,
BaseTypeToCppMapping, intArrayRefT, tensorOptionsT)
Reported by Pylint.
Line: 11
Column: 1
tensorListT, dimnameListT, tensorT, voidT,
BaseTypeToCppMapping, intArrayRefT, tensorOptionsT)
from tools.codegen import local
from typing import Optional, Sequence, Union, List, Set
# This file describes the translation of JIT schema to the public C++
# API, which is what people use when they call functions like at::add.
#
# Prominent characteristics of the C++ API:
Reported by Pylint.
Line: 30
Column: 1
# BTW: policy on name collisions: we try not to have types with
# collisions, but functions are fair game to collide
def name(func: FunctionSchema, *, faithful_name_for_out_overloads: bool = False) -> str:
name = str(func.name.name)
if func.is_out_fn():
if faithful_name_for_out_overloads:
name += '_outf'
else:
Reported by Pylint.
caffe2/python/dataio.py
60 issues
Line: 58
Column: 9
global_init_net: A net invoked at task init time.
global_finish_net: A net invoked at task cleanup time.
"""
pass
def read_ex(self, local_init_net, local_finish_net):
read_net = core.Net('reader_body')
return ([read_net], ) + self.read(read_net)
Reported by Pylint.
Line: 60
Column: 23
"""
pass
def read_ex(self, local_init_net, local_finish_net):
read_net = core.Net('reader_body')
return ([read_net], ) + self.read(read_net)
def read_record_ex(self, local_init_net, local_finish_net):
nets, should_stop, fields = self.read_ex(
Reported by Pylint.
Line: 60
Column: 39
"""
pass
def read_ex(self, local_init_net, local_finish_net):
read_net = core.Net('reader_body')
return ([read_net], ) + self.read(read_net)
def read_record_ex(self, local_init_net, local_finish_net):
nets, should_stop, fields = self.read_ex(
Reported by Pylint.
Line: 179
Column: 24
fields = fields.field_blobs()
self.write(writer_net, fields)
def setup_ex(self, init_net, finish_net):
"""Experimental, don't use yet"""
self.commit(finish_net)
def write_ex(self, fields, local_init_net, local_finish_net, stop_blob):
"""Experimental extension to the interface. Don't use yet"""
Reported by Pylint.
Line: 183
Column: 66
"""Experimental, don't use yet"""
self.commit(finish_net)
def write_ex(self, fields, local_init_net, local_finish_net, stop_blob):
"""Experimental extension to the interface. Don't use yet"""
write_net = core.Net('write_net')
self.write(write_net, fields)
return [write_net]
Reported by Pylint.
Line: 183
Column: 48
"""Experimental, don't use yet"""
self.commit(finish_net)
def write_ex(self, fields, local_init_net, local_finish_net, stop_blob):
"""Experimental extension to the interface. Don't use yet"""
write_net = core.Net('write_net')
self.write(write_net, fields)
return [write_net]
Reported by Pylint.
Line: 183
Column: 32
"""Experimental, don't use yet"""
self.commit(finish_net)
def write_ex(self, fields, local_init_net, local_finish_net, stop_blob):
"""Experimental extension to the interface. Don't use yet"""
write_net = core.Net('write_net')
self.write(write_net, fields)
return [write_net]
Reported by Pylint.
Line: 207
Column: 9
This must be implemented by all Writers, but may be no-op for some
of them.
"""
pass
class ReaderBuilder(object):
""" Allow usage of a reader in distributed fashion. """
def schema(self):
Reported by Pylint.
Line: 297
Column: 1
reader_init_net.add_attribute(self._obj_key, self)
class CounterReader(Reader):
""" Reader that produces increasing integers. """
def __init__(self):
Reader.__init__(self, schema=Struct(('iter', np.int64)))
self.counter = None
self.should_stop = None
Reported by Pylint.
Line: 297
Column: 1
reader_init_net.add_attribute(self._obj_key, self)
class CounterReader(Reader):
""" Reader that produces increasing integers. """
def __init__(self):
Reader.__init__(self, schema=Struct(('iter', np.int64)))
self.counter = None
self.should_stop = None
Reported by Pylint.
caffe2/python/dataio_test.py
60 issues
Line: 80
Column: 14
class TestCompositeReader(TestCase):
@unittest.skipIf(os.environ.get('JENKINS_URL'), 'Flaky test on Jenkins')
def test_composite_reader(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
num_srcs = 3
names = ["src_{}".format(i) for i in range(num_srcs)]
size = 100
offsets = [i * size for i in range(num_srcs)]
Reported by Pylint.
Line: 116
Column: 14
@unittest.skipIf(os.environ.get('JENKINS_URL'), 'Flaky test on Jenkins')
def test_composite_reader_builder(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
num_srcs = 3
names = ["src_{}".format(i) for i in range(num_srcs)]
size = 100
offsets = [i * size for i in range(num_srcs)]
Reported by Pylint.
Line: 153
Column: 14
class TestReaderWithLimit(TestCase):
def test_runtime_threads(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
src_ds = make_source_dataset(ws)
totals = [None] * 3
def proc(rec):
Reported by Pylint.
Line: 203
Column: 14
self.assertEqual(totals[2].fetch(), 6)
def _test_limit_reader_init_shared(self, size):
ws = workspace.C.Workspace()
session = LocalSession(ws)
# Make source dataset
src_ds = make_source_dataset(ws, size=size)
Reported by Pylint.
Line: 382
Column: 39
return ws.blobs[str(dst_ds.content().label())].fetch()
@unittest.skipIf("LevelDB" not in core.C.registered_dbs(), "Need LevelDB")
def test_cached_reader(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
db_path = self._make_temp_path()
Reported by Pylint.
Line: 384
Column: 14
@unittest.skipIf("LevelDB" not in core.C.registered_dbs(), "Need LevelDB")
def test_cached_reader(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
db_path = self._make_temp_path()
# Read data for the first time.
cached_reader1 = CachedReader(
Reported by Pylint.
Line: 422
Column: 39
self._delete_path(db_path)
@unittest.skipIf("LevelDB" not in core.C.registered_dbs(), "Need LevelDB")
def test_db_file_reader(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
db_path = self._make_temp_path()
Reported by Pylint.
Line: 424
Column: 14
@unittest.skipIf("LevelDB" not in core.C.registered_dbs(), "Need LevelDB")
def test_db_file_reader(self):
ws = workspace.C.Workspace()
session = LocalSession(ws)
db_path = self._make_temp_path()
# Build a cache DB file.
cached_reader = CachedReader(
Reported by Pylint.
Line: 45
Column: 34
return src_ds
def make_destination_dataset(ws, schema, name=None):
name = name or 'dst'
dst_init = core.Net('{}_init'.format(name))
with core.NameScope(name):
dst_ds = Dataset(schema, name=name)
dst_ds.init_empty(dst_init)
Reported by Pylint.
Line: 68
Column: 5
def schema(self):
return self._schema
def setup(self, ws):
self._src_ds = make_source_dataset(ws, offset=self._offset, size=self._size,
name=self._name)
return {}
def new_reader(self, **kwargs):
Reported by Pylint.
torch/utils/tensorboard/_pytorch_graph.py
60 issues
Line: 4
Column: 1
from collections import OrderedDict
from typing import Dict, Any
from tensorboard.compat.proto.config_pb2 import RunMetadata
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.step_stats_pb2 import StepStats, DeviceStepStats
from tensorboard.compat.proto.versions_pb2 import VersionDef
import torch
Reported by Pylint.
Line: 5
Column: 1
from typing import Dict, Any
from tensorboard.compat.proto.config_pb2 import RunMetadata
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.step_stats_pb2 import StepStats, DeviceStepStats
from tensorboard.compat.proto.versions_pb2 import VersionDef
import torch
from ._proto_graph import node_proto
Reported by Pylint.
Line: 6
Column: 1
from tensorboard.compat.proto.config_pb2 import RunMetadata
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.step_stats_pb2 import StepStats, DeviceStepStats
from tensorboard.compat.proto.versions_pb2 import VersionDef
import torch
from ._proto_graph import node_proto
Reported by Pylint.
Line: 7
Column: 1
from tensorboard.compat.proto.config_pb2 import RunMetadata
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.step_stats_pb2 import StepStats, DeviceStepStats
from tensorboard.compat.proto.versions_pb2 import VersionDef
import torch
from ._proto_graph import node_proto
methods_OP = ['attributeNames', 'hasMultipleOutputs', 'hasUses', 'inputs',
Reported by Pylint.
Line: 10
Column: 1
from tensorboard.compat.proto.versions_pb2 import VersionDef
import torch
from ._proto_graph import node_proto
methods_OP = ['attributeNames', 'hasMultipleOutputs', 'hasUses', 'inputs',
'kind', 'outputs', 'outputsSize', 'scopeName']
# Some additional methods to explure for methods_IO are
#
Reported by Pylint.
Line: 27
Column: 3
class NodeBase(object):
def __init__(self, debugName=None, inputs=None, scope=None, tensor_size=None, op_type='UnSpecified', attributes=''):
# TODO; Specify a __slots__ for this class or potentially
# used namedtuple instead
self.debugName = debugName
self.inputs = inputs
self.tensor_size = tensor_size
self.kind = op_type
Reported by Pylint.
Line: 37
Column: 9
self.scope = scope
def __repr__(self):
repr = []
repr.append(str(type(self)))
for m in dir(self):
if '__' not in m:
repr.append(m + ': ' + str(getattr(self, m)) + str(type(getattr(self, m))))
return '\n'.join(repr) + '\n\n'
Reported by Pylint.
Line: 92
Column: 3
def __init__(self, node_cpp):
super(NodePyOP, self).__init__(node_cpp, methods_OP)
# Replace single quote which causes strange behavior in TensorBoard
# TODO: See if we can remove this in the future
self.attributes = str({k: node_cpp[k] for k in node_cpp.attributeNames()}).replace("'", ' ')
self.kind = node_cpp.kind()
class GraphPy(object):
Reported by Pylint.
Line: 182
Column: 3
Converts graph representation of GraphPy object to TensorBoard
required format.
"""
# TODO: compute correct memory usage and CPU time once
# PyTorch supports it
nodes = []
for v in self.nodes_io.values():
nodes.append(node_proto(v.debugName,
input=v.inputs,
Reported by Pylint.
Line: 194
Column: 11
return nodes
def parse(graph, trace, args=None, omit_useless_nodes=True):
"""This method parses an optimized PyTorch model graph and produces
a list of nodes and node stats for eventual conversion to TensorBoard
protobuf format.
Args:
Reported by Pylint.
torch/jit/mobile/__init__.py
59 issues
Line: 48
Column: 22
map_location = validate_map_location(map_location)
if isinstance(f, str) or isinstance(f, pathlib.Path):
cpp_module = torch._C._load_for_lite_interpreter(f, map_location)
else:
cpp_module = torch._C._load_for_lite_interpreter_from_buffer(f.read(), map_location)
return LiteScriptModule(cpp_module)
Reported by Pylint.
Line: 48
Column: 22
map_location = validate_map_location(map_location)
if isinstance(f, str) or isinstance(f, pathlib.Path):
cpp_module = torch._C._load_for_lite_interpreter(f, map_location)
else:
cpp_module = torch._C._load_for_lite_interpreter_from_buffer(f.read(), map_location)
return LiteScriptModule(cpp_module)
Reported by Pylint.
Line: 50
Column: 22
if isinstance(f, str) or isinstance(f, pathlib.Path):
cpp_module = torch._C._load_for_lite_interpreter(f, map_location)
else:
cpp_module = torch._C._load_for_lite_interpreter_from_buffer(f.read(), map_location)
return LiteScriptModule(cpp_module)
class LiteScriptModule(object):
def __init__(self, cpp_module):
Reported by Pylint.
Line: 50
Column: 22
if isinstance(f, str) or isinstance(f, pathlib.Path):
cpp_module = torch._C._load_for_lite_interpreter(f, map_location)
else:
cpp_module = torch._C._load_for_lite_interpreter_from_buffer(f.read(), map_location)
return LiteScriptModule(cpp_module)
class LiteScriptModule(object):
def __init__(self, cpp_module):
Reported by Pylint.
Line: 59
Column: 1
self._c = cpp_module
super(LiteScriptModule, self).__init__()
def __call__(self, *input):
return self._c.forward(input)
def find_method(self, method_name):
return self._c.find_method(method_name)
Reported by Pylint.
Line: 65
Column: 1
def find_method(self, method_name):
return self._c.find_method(method_name)
def forward(self, *input):
return self._c.forward(input)
def run_method(self, method_name, *input):
return self._c.run_method(method_name, input)
Reported by Pylint.
Line: 68
Column: 1
def forward(self, *input):
return self._c.forward(input)
def run_method(self, method_name, *input):
return self._c.run_method(method_name, input)
def _export_operator_list(module: LiteScriptModule):
r"""
return a set of root operator names (with overload name) that are used by any method
Reported by Pylint.
Line: 76
Column: 12
return a set of root operator names (with overload name) that are used by any method
in this mobile module.
"""
return torch._C._export_operator_list(module._c)
def _get_model_bytecode_version(f_input) -> int:
r"""
Args:
f_input: a file-like object (has to implement read, readline, tell, and seek),
Reported by Pylint.
Line: 76
Column: 12
return a set of root operator names (with overload name) that are used by any method
in this mobile module.
"""
return torch._C._export_operator_list(module._c)
def _get_model_bytecode_version(f_input) -> int:
r"""
Args:
f_input: a file-like object (has to implement read, readline, tell, and seek),
Reported by Pylint.
Line: 76
Column: 43
return a set of root operator names (with overload name) that are used by any method
in this mobile module.
"""
return torch._C._export_operator_list(module._c)
def _get_model_bytecode_version(f_input) -> int:
r"""
Args:
f_input: a file-like object (has to implement read, readline, tell, and seek),
Reported by Pylint.
docs/source/conf.py
59 issues
Line: 25
Column: 1
# source code directory, relative to this file, for sphinx-autobuild
# sys.path.insert(0, os.path.abspath('../..'))
import torch
try:
import torchvision # noqa: F401
except ImportError:
import warnings
Reported by Pylint.
Line: 35
Column: 1
RELEASE = os.environ.get('RELEASE', False)
import pytorch_sphinx_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
Reported by Pylint.
Line: 274
Column: 1
# in torch.html by overriding the visit_reference method of html writers.
# Someday this can be removed, once the old links fade away
from sphinx.writers import html, html5
def replace(Klass):
old_call = Klass.visit_reference
def visit_reference(self, node):
Reported by Pylint.
Line: 364
Column: 1
# -- A patch that prevents Sphinx from cross-referencing ivar tags -------
# See http://stackoverflow.com/a/41184353/3343043
from docutils import nodes
from sphinx.util.docfields import TypedField
from sphinx import addnodes
import sphinx.ext.doctest
# Without this, doctest adds any example with a `>>>` as a test
Reported by Pylint.
Line: 365
Column: 1
# See http://stackoverflow.com/a/41184353/3343043
from docutils import nodes
from sphinx.util.docfields import TypedField
from sphinx import addnodes
import sphinx.ext.doctest
# Without this, doctest adds any example with a `>>>` as a test
doctest_test_doctest_blocks = ''
Reported by Pylint.
Line: 366
Column: 1
from docutils import nodes
from sphinx.util.docfields import TypedField
from sphinx import addnodes
import sphinx.ext.doctest
# Without this, doctest adds any example with a `>>>` as a test
doctest_test_doctest_blocks = ''
doctest_default_flags = sphinx.ext.doctest.doctest.ELLIPSIS
Reported by Pylint.
Line: 367
Column: 1
from docutils import nodes
from sphinx.util.docfields import TypedField
from sphinx import addnodes
import sphinx.ext.doctest
# Without this, doctest adds any example with a `>>>` as a test
doctest_test_doctest_blocks = ''
doctest_default_flags = sphinx.ext.doctest.doctest.ELLIPSIS
doctest_global_setup = '''
Reported by Pylint.
Line: 28
Column: 5
import torch
try:
import torchvision # noqa: F401
except ImportError:
import warnings
warnings.warn('unable to load "torchvision" package')
RELEASE = os.environ.get('RELEASE', False)
Reported by Pylint.
Line: 80
Column: 3
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# TODO: document these and remove them from here.
coverage_ignore_modules = [
"torch.autograd",
"torch.cuda",
"torch.distributed",
Reported by Pylint.
Line: 111
Column: 3
"whichmodule",
"wrap_check_inputs",
# torch
# TODO: This should be documented eventually, but only after
# we build out more support for meta functions and actually
# do a release with it
"empty_meta",
]
Reported by Pylint.
caffe2/python/docs/generator.py
59 issues
Line: 13
Column: 12
from caffe2.python.docs.formatter import Markdown
from future.utils import viewitems, viewvalues
OpSchema = workspace.C.OpSchema
class DocUploader(object):
def __init__(self):
pass
Reported by Pylint.
Line: 46
Column: 9
def getOperators(self):
# map: op_name -> operator
self.operators = {}
# map: op_name -> [engine, engine]
self.engines = {}
def filePriority(x):
if x == "caffe2/caffe2/operators":
Reported by Pylint.
Line: 48
Column: 9
# map: op_name -> operator
self.operators = {}
# map: op_name -> [engine, engine]
self.engines = {}
def filePriority(x):
if x == "caffe2/caffe2/operators":
return 0
if 'contrib' in x.split('/'):
Reported by Pylint.
Line: 59
Column: 21
return 3
return 1
for name in core._GetRegisteredOperators():
schema = OpSchema.get(name)
if schema:
priority = filePriority(os.path.dirname(schema.file))
operator = self.getOperatorDoc(name, schema, priority)
self.operators[name] = operator
Reported by Pylint.
Line: 150
Column: 29
formatter.addTable(table, (table == []))
def generateInterface(self, formatter):
def makeDesc(title, args):
f = formatter.clone()
f.addEmphasis(title, 1)
out = [(f.dump(), '')]
for arg in args:
f = formatter.clone()
Reported by Pylint.
Line: 1
Column: 1
## @package generator
# Module caffe2.python.docs.generator
import argparse
import os
from caffe2.python import core, workspace
Reported by Pylint.
Line: 16
Column: 1
OpSchema = workspace.C.OpSchema
class DocUploader(object):
def __init__(self):
pass
def upload(self, text):
pass
Reported by Pylint.
Line: 16
Column: 1
OpSchema = workspace.C.OpSchema
class DocUploader(object):
def __init__(self):
pass
def upload(self, text):
pass
Reported by Pylint.
Line: 16
Column: 1
OpSchema = workspace.C.OpSchema
class DocUploader(object):
def __init__(self):
pass
def upload(self, text):
pass
Reported by Pylint.
Line: 20
Column: 5
def __init__(self):
pass
def upload(self, text):
pass
class DocGenerator(object):
def __init__(self, formatter, uploader):
Reported by Pylint.