The following issues were found:
torch/nn/__init__.py
7 issues
Line: 1
Column: 1
from .modules import * # noqa: F403
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Reported by Pylint.
Line: 2
Column: 1
from .modules import * # noqa: F403
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Reported by Pylint.
Line: 3
Column: 1
from .modules import * # noqa: F403
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Reported by Pylint.
Line: 4
Column: 1
from .modules import * # noqa: F403
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Reported by Pylint.
Line: 5
Column: 1
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Given kwargs, returns a canonicalized dict of factory kwargs that can be directly passed
Reported by Pylint.
Line: 1
Column: 1
from .modules import * # noqa: F403
from .parameter import Parameter, UninitializedParameter, UninitializedBuffer
from .parallel import DataParallel
from . import init
from . import utils
def factory_kwargs(kwargs):
r"""
Reported by Pylint.
Line: 39
Column: 5
raise TypeError(f"unexpected kwargs {kwargs.keys() - expected_keys}")
# guarantee no input kwargs is untouched
r = dict(kwargs.get("factory_kwargs", {}))
for k in simple_keys:
if k in kwargs:
if k in r:
raise TypeError(f"{k} specified twice, in **kwargs and in factory_kwargs")
r[k] = kwargs[k]
Reported by Pylint.
torch/package/__init__.py
7 issues
Line: 1
Column: 1
from .analyze.is_from_package import is_from_package
from .file_structure_representation import Directory
from .glob_group import GlobGroup
from .importer import (
Importer,
ObjMismatchError,
ObjNotFoundError,
OrderedImporter,
sys_importer,
Reported by Pylint.
Line: 2
Column: 1
from .analyze.is_from_package import is_from_package
from .file_structure_representation import Directory
from .glob_group import GlobGroup
from .importer import (
Importer,
ObjMismatchError,
ObjNotFoundError,
OrderedImporter,
sys_importer,
Reported by Pylint.
Line: 3
Column: 1
from .analyze.is_from_package import is_from_package
from .file_structure_representation import Directory
from .glob_group import GlobGroup
from .importer import (
Importer,
ObjMismatchError,
ObjNotFoundError,
OrderedImporter,
sys_importer,
Reported by Pylint.
Line: 4
Column: 1
from .analyze.is_from_package import is_from_package
from .file_structure_representation import Directory
from .glob_group import GlobGroup
from .importer import (
Importer,
ObjMismatchError,
ObjNotFoundError,
OrderedImporter,
sys_importer,
Reported by Pylint.
Line: 11
Column: 1
OrderedImporter,
sys_importer,
)
from .package_exporter import EmptyMatchError, PackageExporter, PackagingError
from .package_importer import PackageImporter
Reported by Pylint.
Line: 12
Column: 1
sys_importer,
)
from .package_exporter import EmptyMatchError, PackageExporter, PackagingError
from .package_importer import PackageImporter
Reported by Pylint.
Line: 1
Column: 1
from .analyze.is_from_package import is_from_package
from .file_structure_representation import Directory
from .glob_group import GlobGroup
from .importer import (
Importer,
ObjMismatchError,
ObjNotFoundError,
OrderedImporter,
sys_importer,
Reported by Pylint.
torch/package/_mock.py
7 issues
Line: 87
Column: 1
class MockedObject:
_name: str
def __new__(cls, *args, **kwargs):
# _suppress_err is set by us in the mocked module impl, so that we can
# construct instances of MockedObject to hand out to people looking up
# module attributes.
# Any other attempt to construct a MockedOject instance (say, in the
Reported by Pylint.
Line: 112
Column: 20
return f"MockedObject({self._name})"
def install_method(method_name):
def _not_implemented(self, *args, **kwargs):
raise NotImplementedError(
f"Object '{self._name}' was mocked out during packaging but it is being used in {method_name}"
)
Reported by Pylint.
Line: 115
Column: 24
def install_method(method_name):
def _not_implemented(self, *args, **kwargs):
raise NotImplementedError(
f"Object '{self._name}' was mocked out during packaging but it is being used in {method_name}"
)
setattr(MockedObject, method_name, _not_implemented)
Reported by Pylint.
Line: 1
Column: 1
_magic_methods = [
"__subclasscheck__",
"__hex__",
"__rmul__",
"__float__",
"__idiv__",
"__setattr__",
"__div__",
"__invert__",
Reported by Pylint.
Line: 84
Column: 1
]
class MockedObject:
_name: str
def __new__(cls, *args, **kwargs):
# _suppress_err is set by us in the mocked module impl, so that we can
# construct instances of MockedObject to hand out to people looking up
Reported by Pylint.
Line: 112
Column: 1
return f"MockedObject({self._name})"
def install_method(method_name):
def _not_implemented(self, *args, **kwargs):
raise NotImplementedError(
f"Object '{self._name}' was mocked out during packaging but it is being used in {method_name}"
)
Reported by Pylint.
Line: 115
Column: 1
def install_method(method_name):
def _not_implemented(self, *args, **kwargs):
raise NotImplementedError(
f"Object '{self._name}' was mocked out during packaging but it is being used in {method_name}"
)
setattr(MockedObject, method_name, _not_implemented)
Reported by Pylint.
torch/fx/immutable_collections.py
7 issues
Line: 1
Column: 1
_help_mutation = """\
If you are attempting to modify the kwargs or args of a torch.fx.Node object,
instead create a new copy of it and assign the copy to the node:
new_args = ... # copy and mutate args
node.args = new_args
"""
def _no_mutation(self, *args, **kwargs):
Reported by Pylint.
Line: 2
Column: 1
_help_mutation = """\
If you are attempting to modify the kwargs or args of a torch.fx.Node object,
instead create a new copy of it and assign the copy to the node:
new_args = ... # copy and mutate args
node.args = new_args
"""
def _no_mutation(self, *args, **kwargs):
Reported by Pylint.
Line: 10
Column: 1
"""
def _no_mutation(self, *args, **kwargs):
raise NotImplementedError(f"'{type(self).__name__}' object does not support mutation. {_help_mutation}")
def _create_immutable_container(base, mutable_functions):
container = type('immutable_' + base.__name__, (base,), {})
for attr in mutable_functions:
setattr(container, attr, _no_mutation)
Reported by Pylint.
Line: 18
Column: 1
setattr(container, attr, _no_mutation)
return container
immutable_list = _create_immutable_container(list,
['__delitem__', '__iadd__', '__imul__', '__setitem__', 'append',
'clear', 'extend', 'insert', 'pop', 'remove'])
immutable_list.__reduce__ = lambda self: (immutable_list, (tuple(iter(self)),))
immutable_dict = _create_immutable_container(dict, ['__delitem__', '__setitem__', 'clear', 'pop', 'popitem', 'update'])
Reported by Pylint.
Line: 19
Column: 1
return container
immutable_list = _create_immutable_container(list,
['__delitem__', '__iadd__', '__imul__', '__setitem__', 'append',
'clear', 'extend', 'insert', 'pop', 'remove'])
immutable_list.__reduce__ = lambda self: (immutable_list, (tuple(iter(self)),))
immutable_dict = _create_immutable_container(dict, ['__delitem__', '__setitem__', 'clear', 'pop', 'popitem', 'update'])
immutable_dict.__reduce__ = lambda self: (immutable_dict, (iter(self.items()),))
Reported by Pylint.
Line: 23
Column: 1
'clear', 'extend', 'insert', 'pop', 'remove'])
immutable_list.__reduce__ = lambda self: (immutable_list, (tuple(iter(self)),))
immutable_dict = _create_immutable_container(dict, ['__delitem__', '__setitem__', 'clear', 'pop', 'popitem', 'update'])
immutable_dict.__reduce__ = lambda self: (immutable_dict, (iter(self.items()),))
Reported by Pylint.
Line: 23
Column: 1
'clear', 'extend', 'insert', 'pop', 'remove'])
immutable_list.__reduce__ = lambda self: (immutable_list, (tuple(iter(self)),))
immutable_dict = _create_immutable_container(dict, ['__delitem__', '__setitem__', 'clear', 'pop', 'popitem', 'update'])
immutable_dict.__reduce__ = lambda self: (immutable_dict, (iter(self.items()),))
Reported by Pylint.
torch/utils/data/datapipes/map/callable.py
7 issues
Line: 9
Column: 3
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
# own logic there. This globally affects the behavior of the standard library
# pickler for any user who transitively depends on this module!
# Undo this extension to avoid altering the behavior of the pickler globally.
dill.extend(use_dill=False)
DILL_AVAILABLE = True
Reported by Pylint.
Line: 80
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle
def __setstate__(self, state):
(self.datapipe, dill_function, self.args, self.kwargs) = state
if DILL_AVAILABLE:
self.fn = dill.loads(dill_function) # type: ignore[assignment]
else:
self.fn = dill_function # type: ignore[assignment]
Reported by Bandit.
Line: 1
Column: 1
import warnings
from typing import Callable, Dict, Optional, Tuple, TypeVar
from torch.utils.data import MapDataPipe, functional_datapipe
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
Reported by Pylint.
Line: 7
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
from torch.utils.data import MapDataPipe, functional_datapipe
try:
import dill
# XXX: By default, dill writes the Pickler dispatch table to inject its
# own logic there. This globally affects the behavior of the standard library
# pickler for any user who transitively depends on this module!
# Undo this extension to avoid altering the behavior of the pickler globally.
Reported by Bandit.
Line: 18
Column: 1
except ImportError:
DILL_AVAILABLE = False
T_co = TypeVar('T_co', covariant=True)
# Default function to return each item directly
# In order to keep datapipe picklable, eliminates the usage
# of python lambda function
Reported by Pylint.
Line: 24
Column: 1
# Default function to return each item directly
# In order to keep datapipe picklable, eliminates the usage
# of python lambda function
def default_fn(data):
return data
@functional_datapipe('map')
class MapMapDataPipe(MapDataPipe[T_co]):
Reported by Pylint.
Line: 59
Column: 9
"Lambda function is not supported for pickle, please use "
"regular python function or functools.partial instead."
)
self.fn = fn # type: ignore[assignment]
self.args = () if fn_args is None else fn_args
self.kwargs = {} if fn_kwargs is None else fn_kwargs
def __len__(self) -> int:
return len(self.datapipe)
Reported by Pylint.
torch/fx/experimental/unification/multipledispatch/variadic.py
7 issues
Line: 3
Column: 1
import six
from .utils import typename
class VariadicSignatureType(type):
# checking if subclass is a subclass of self
def __subclasscheck__(cls, subclass):
other_type = (subclass.variadic_type if isvariadic(subclass)
Reported by Pylint.
Line: 73
Column: 1
)
class Variadic(six.with_metaclass(VariadicSignatureMeta)):
"""A class whose getitem method can be used to generate a new type
representing a specific variadic signature.
Examples
--------
>>> Variadic[int] # any number of int arguments
Reported by Pylint.
Line: 1
Column: 1
import six
from .utils import typename
class VariadicSignatureType(type):
# checking if subclass is a subclass of self
def __subclasscheck__(cls, subclass):
other_type = (subclass.variadic_type if isvariadic(subclass)
Reported by Pylint.
Line: 6
Column: 1
from .utils import typename
class VariadicSignatureType(type):
# checking if subclass is a subclass of self
def __subclasscheck__(cls, subclass):
other_type = (subclass.variadic_type if isvariadic(subclass)
else (subclass,))
return subclass is cls or all(
Reported by Pylint.
Line: 12
Column: 1
other_type = (subclass.variadic_type if isvariadic(subclass)
else (subclass,))
return subclass is cls or all(
issubclass(other, cls.variadic_type) for other in other_type # type: ignore[attr-defined]
)
def __eq__(cls, other):
"""
Return True if other has the same variadic type
Reported by Pylint.
Line: 65
Column: 1
" (Variadic[int] or Variadic[(int, float)]")
if not isinstance(variadic_type, tuple):
variadic_type = variadic_type,
return VariadicSignatureType(
'Variadic[%s]' % typename(variadic_type),
(),
dict(variadic_type=variadic_type, __slots__=())
)
Reported by Pylint.
Line: 73
Column: 1
)
class Variadic(six.with_metaclass(VariadicSignatureMeta)):
"""A class whose getitem method can be used to generate a new type
representing a specific variadic signature.
Examples
--------
>>> Variadic[int] # any number of int arguments
Reported by Pylint.
torch/testing/_deprecated.py
7 issues
Line: 30
Column: 51
return outer_wrapper
rand = warn_deprecated("Use torch.rand instead.")(torch.rand)
randn = warn_deprecated("Use torch.randn instead.")(torch.randn)
Reported by Pylint.
Line: 31
Column: 53
rand = warn_deprecated("Use torch.rand instead.")(torch.rand)
randn = warn_deprecated("Use torch.randn instead.")(torch.randn)
Reported by Pylint.
Line: 1
Column: 1
"""This module exists since the `torch.testing` exposed a lot of stuff that shouldn't have been public. Although this
was never documented anywhere, some other internal FB projects as well as downstream OSS projects might use this. Thus,
we don't internalize without warning, but still go through a deprecation cycle.
"""
import functools
import warnings
from typing import Any, Callable
Reported by Pylint.
Line: 2
Column: 1
"""This module exists since the `torch.testing` exposed a lot of stuff that shouldn't have been public. Although this
was never documented anywhere, some other internal FB projects as well as downstream OSS projects might use this. Thus,
we don't internalize without warning, but still go through a deprecation cycle.
"""
import functools
import warnings
from typing import Any, Callable
Reported by Pylint.
Line: 16
Column: 1
__all__ = ["rand", "randn"]
def warn_deprecated(instructions: str) -> Callable:
def outer_wrapper(fn: Callable) -> Callable:
msg = f"torch.testing.{fn.__name__} is deprecated and will be removed in the future. {instructions.strip()}"
@functools.wraps(fn)
def inner_wrapper(*args: Any, **kwargs: Any) -> Any:
Reported by Pylint.
Line: 17
Column: 5
def warn_deprecated(instructions: str) -> Callable:
def outer_wrapper(fn: Callable) -> Callable:
msg = f"torch.testing.{fn.__name__} is deprecated and will be removed in the future. {instructions.strip()}"
@functools.wraps(fn)
def inner_wrapper(*args: Any, **kwargs: Any) -> Any:
warnings.warn(msg, FutureWarning)
Reported by Pylint.
Line: 18
Column: 1
def warn_deprecated(instructions: str) -> Callable:
def outer_wrapper(fn: Callable) -> Callable:
msg = f"torch.testing.{fn.__name__} is deprecated and will be removed in the future. {instructions.strip()}"
@functools.wraps(fn)
def inner_wrapper(*args: Any, **kwargs: Any) -> Any:
warnings.warn(msg, FutureWarning)
return fn(*args, **kwargs)
Reported by Pylint.
tools/code_coverage/package/oss/run.py
7 issues
Line: 4
Column: 1
import os
import time
from ..tool import clang_coverage, gcc_coverage
from ..util.setting import TestList, TestPlatform
from ..util.utils import get_raw_profiles_folder, print_time
from .utils import get_oss_binary_file
Reported by Pylint.
Line: 5
Column: 1
import time
from ..tool import clang_coverage, gcc_coverage
from ..util.setting import TestList, TestPlatform
from ..util.utils import get_raw_profiles_folder, print_time
from .utils import get_oss_binary_file
def clang_run(tests: TestList) -> None:
Reported by Pylint.
Line: 6
Column: 1
from ..tool import clang_coverage, gcc_coverage
from ..util.setting import TestList, TestPlatform
from ..util.utils import get_raw_profiles_folder, print_time
from .utils import get_oss_binary_file
def clang_run(tests: TestList) -> None:
start_time = time.time()
Reported by Pylint.
Line: 7
Column: 1
from ..tool import clang_coverage, gcc_coverage
from ..util.setting import TestList, TestPlatform
from ..util.utils import get_raw_profiles_folder, print_time
from .utils import get_oss_binary_file
def clang_run(tests: TestList) -> None:
start_time = time.time()
for test in tests:
Reported by Pylint.
Line: 1
Column: 1
import os
import time
from ..tool import clang_coverage, gcc_coverage
from ..util.setting import TestList, TestPlatform
from ..util.utils import get_raw_profiles_folder, print_time
from .utils import get_oss_binary_file
Reported by Pylint.
Line: 10
Column: 1
from .utils import get_oss_binary_file
def clang_run(tests: TestList) -> None:
start_time = time.time()
for test in tests:
# raw_file
raw_file = os.path.join(get_raw_profiles_folder(), test.name + ".profraw")
# binary file
Reported by Pylint.
Line: 23
Column: 1
print_time("running binaries takes time: ", start_time, summary_time=True)
def gcc_run(tests: TestList) -> None:
start_time = time.time()
for test in tests:
# binary file
binary_file = get_oss_binary_file(test.name, test.test_type)
gcc_coverage.run_target(binary_file, test.test_type)
Reported by Pylint.
test/package/package_a/subpackage.py
7 issues
Line: 1
Column: 1
result = "package_a.subpackage"
class PackageASubpackageObject:
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 1
Column: 1
result = "package_a.subpackage"
class PackageASubpackageObject:
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 4
Column: 1
result = "package_a.subpackage"
class PackageASubpackageObject:
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 4
Column: 1
result = "package_a.subpackage"
class PackageASubpackageObject:
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 8
Column: 1
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 8
Column: 1
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
Line: 8
Column: 1
pass
def leaf_function(a, b):
return a + b
Reported by Pylint.
tools/code_coverage/package/oss/cov_json.py
7 issues
Line: 1
Column: 1
from ..tool import clang_coverage
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 2
Column: 1
from ..tool import clang_coverage
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 3
Column: 1
from ..tool import clang_coverage
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 4
Column: 1
from ..tool import clang_coverage
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 5
Column: 1
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
check_compiler_type(cov_type)
Reported by Pylint.
Line: 1
Column: 1
from ..tool import clang_coverage
from ..util.setting import CompilerType, Option, TestList, TestPlatform
from ..util.utils import check_compiler_type
from .init import detect_compiler_type # type: ignore[attr-defined]
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
Reported by Pylint.
Line: 8
Column: 1
from .run import clang_run, gcc_run
def get_json_report(test_list: TestList, options: Option) -> None:
cov_type = detect_compiler_type()
check_compiler_type(cov_type)
if cov_type == CompilerType.CLANG:
# run
if options.need_run:
Reported by Pylint.