The following issues were found:
torch/distributed/elastic/utils/logging.py
6 issues
Line: 36
Column: 11
def _setup_logger(name: Optional[str] = None):
log = logging.getLogger(name)
log.setLevel(os.environ.get("LOGLEVEL", get_log_level()))
return log
def _derive_module_name(depth: int = 1) -> Optional[str]:
Reported by Pylint.
Line: 10
Column: 1
# LICENSE file in the root directory of this source tree.
import inspect
import logging
import os
import warnings
from typing import Optional
from torch.distributed.elastic.utils.log_level import get_log_level
Reported by Pylint.
Line: 64
Column: 12
filename = frame_info[1]
module_name = os.path.splitext(os.path.basename(filename))[0]
return module_name
except Exception as e:
warnings.warn(
f"Error deriving logger module name, using <None>. Exception: {e}",
RuntimeWarning,
)
return None
Reported by Pylint.
Line: 1
Column: 1
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import inspect
Reported by Pylint.
Line: 50
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"""
try:
stack = inspect.stack()
assert depth < len(stack)
# FrameInfo is just a named tuple: (frame, filename, lineno, function, code_context, index)
frame_info = stack[depth]
module = inspect.getmodule(frame_info[0])
if module:
Reported by Bandit.
Line: 64
Column: 5
filename = frame_info[1]
module_name = os.path.splitext(os.path.basename(filename))[0]
return module_name
except Exception as e:
warnings.warn(
f"Error deriving logger module name, using <None>. Exception: {e}",
RuntimeWarning,
)
return None
Reported by Pylint.
tools/codegen/dest/__init__.py
6 issues
Line: 1
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
Line: 2
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
Line: 1
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
Line: 1
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
Line: 2
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
Line: 2
Column: 1
from .register_dispatch_key import RegisterDispatchKey as RegisterDispatchKey
from .native_functions import compute_native_function_declaration as compute_native_function_declaration
Reported by Pylint.
torch/distributed/elastic/utils/data/elastic_distributed_sampler.py
6 issues
Line: 53
Column: 13
def __iter__(self):
# deterministically shuffle based on epoch
g = torch.Generator()
g.manual_seed(self.epoch)
indices = (
torch.randperm(len(self.dataset) - self.start_index, generator=g)
.add(self.start_index)
.tolist()
Reported by Pylint.
Line: 56
Column: 13
g = torch.Generator()
g.manual_seed(self.epoch)
indices = (
torch.randperm(len(self.dataset) - self.start_index, generator=g)
.add(self.start_index)
.tolist()
)
# add extra samples to make it evenly divisible
Reported by Pylint.
Line: 1
Column: 1
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import math
Reported by Pylint.
Line: 53
Column: 9
def __iter__(self):
# deterministically shuffle based on epoch
g = torch.Generator()
g.manual_seed(self.epoch)
indices = (
torch.randperm(len(self.dataset) - self.start_index, generator=g)
.add(self.start_index)
.tolist()
Reported by Pylint.
Line: 63
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# add extra samples to make it evenly divisible
indices += indices[: (self.total_size - len(indices))]
assert len(indices) == self.total_size
# subsample
indices = indices[self.rank : self.total_size : self.num_replicas]
assert len(indices) == self.num_samples
Reported by Bandit.
Line: 67
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# subsample
indices = indices[self.rank : self.total_size : self.num_replicas]
assert len(indices) == self.num_samples
return iter(indices)
def __len__(self):
return self.num_samples
Reported by Bandit.
torch/distributed/elastic/multiprocessing/__init__.py
6 issues
Line: 240
Column: 9
error_file = os.path.join(clogdir, "error.json")
error_files[local_rank] = error_file
log.info(f"Setting worker{local_rank} reply file to: {error_file}")
envs[local_rank]["TORCHELASTIC_ERROR_FILE"] = error_file
context: PContext
if isinstance(entrypoint, str):
context = SubprocessContext(
Reported by Pylint.
Line: 85
Column: 1
log = get_logger()
def start_processes(
name: str,
entrypoint: Union[Callable, str],
args: Dict[int, Tuple],
envs: Dict[int, Dict[str, str]],
log_dir: str,
Reported by Pylint.
Line: 85
Column: 1
log = get_logger()
def start_processes(
name: str,
entrypoint: Union[Callable, str],
args: Dict[int, Tuple],
envs: Dict[int, Dict[str, str]],
log_dir: str,
Reported by Pylint.
Line: 208
Column: 5
# |- ...
# |- (nprocs-1)
redirs = to_map(redirects, nprocs)
ts = to_map(tee, nprocs)
# to tee stdout/stderr we first redirect into a file
# then tail -f stdout.log/stderr.log so add tee settings to redirects
for local_rank, tee_std in ts.items():
redirect_std = redirs[local_rank]
Reported by Pylint.
Line: 226
Column: 9
clogdir = os.path.join(log_dir, str(local_rank))
os.mkdir(clogdir)
rd = redirs[local_rank]
if (rd & Std.OUT) == Std.OUT:
stdouts[local_rank] = os.path.join(clogdir, "stdout.log")
if (rd & Std.ERR) == Std.ERR:
stderrs[local_rank] = os.path.join(clogdir, "stderr.log")
Reported by Pylint.
Line: 232
Column: 9
if (rd & Std.ERR) == Std.ERR:
stderrs[local_rank] = os.path.join(clogdir, "stderr.log")
t = ts[local_rank]
if t & Std.OUT == Std.OUT:
tee_stdouts[local_rank] = stdouts[local_rank]
if t & Std.ERR == Std.ERR:
tee_stderrs[local_rank] = stderrs[local_rank]
Reported by Pylint.
torch/distributed/elastic/metrics/__init__.py
6 issues
Line: 140
Column: 1
from typing import Optional
from .api import ( # noqa: F401
ConsoleMetricHandler,
MetricData,
MetricHandler,
MetricsConfig,
NullMetricHandler,
Reported by Pylint.
Line: 161
Column: 5
try:
from torch.distributed.elastic.metrics.static_init import * # type: ignore[import] # noqa: F401 F403
except ModuleNotFoundError:
pass
Reported by Pylint.
Line: 161
Column: 5
try:
from torch.distributed.elastic.metrics.static_init import * # type: ignore[import] # noqa: F401 F403
except ModuleNotFoundError:
pass
Reported by Pylint.
Line: 156
Column: 24
)
def initialize_metrics(cfg: Optional[MetricsConfig] = None):
pass
try:
from torch.distributed.elastic.metrics.static_init import * # type: ignore[import] # noqa: F401 F403
Reported by Pylint.
Line: 156
Column: 1
)
def initialize_metrics(cfg: Optional[MetricsConfig] = None):
pass
try:
from torch.distributed.elastic.metrics.static_init import * # type: ignore[import] # noqa: F401 F403
Reported by Pylint.
Line: 161
Column: 1
try:
from torch.distributed.elastic.metrics.static_init import * # type: ignore[import] # noqa: F401 F403
except ModuleNotFoundError:
pass
Reported by Pylint.
torch/csrc/api/src/serialize/input-archive.cpp
6 issues
Line: 125
Column: 7
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
return 0;
}
size_t nread = std::min(static_cast<size_t>(pos) + n, size_) - pos;
memcpy(buf, data_ + pos, nread);
return nread;
}
private:
const char* data_;
size_t size_;
Reported by FlawFinder.
Line: 21
Column: 20
CWE codes:
120
20
InputArchive::InputArchive() : module_("Module", std::make_shared<jit::CompilationUnit>()) {}
void InputArchive::read(const std::string& key, c10::IValue& ivalue) {
ivalue = module_.attr(key);
}
bool InputArchive::try_read(
const std::string& key,
Reported by FlawFinder.
Line: 86
Column: 20
CWE codes:
120
20
return true;
}
void InputArchive::read(const std::string& key, InputArchive& archive) {
TORCH_CHECK(
try_read(key, archive),
"No such serialized submodule: '",
hierarchy_prefix_,
key,
Reported by FlawFinder.
Line: 118
Column: 12
CWE codes:
120
20
: data_(data), size_(size) {
}
size_t size() const override { return size_; }
size_t read(uint64_t pos, void* buf, size_t n, const char* what = "")
const override {
(void) what;
if (pos >= size_) {
return 0;
}
Reported by FlawFinder.
Line: 150
Column: 12
CWE codes:
120
20
size_func_(size_func) {
}
size_t size() const override { return size_func_(); }
size_t read(uint64_t pos, void* buf, size_t n, const char* what = "")
const override {
(void)what;
return read_func_(pos, buf, n);
}
private:
Reported by FlawFinder.
tools/autograd/context.py
6 issues
Line: 1
Column: 1
from tools.codegen.api.autograd import NativeFunctionWithDifferentiabilityInfo as NFWDI
from tools.codegen.context import native_function_manager
from tools.codegen.utils import T
import functools
from typing import Callable
# Like tools.api.context.with_native_function, but for
# NativeFunctionWithDifferentiabilityInfo.
Reported by Pylint.
Line: 5
Column: 1
from tools.codegen.context import native_function_manager
from tools.codegen.utils import T
import functools
from typing import Callable
# Like tools.api.context.with_native_function, but for
# NativeFunctionWithDifferentiabilityInfo.
def with_native_function_with_differentiability_info(func: Callable[[NFWDI], T]) -> Callable[[NFWDI], T]:
Reported by Pylint.
Line: 6
Column: 1
from tools.codegen.utils import T
import functools
from typing import Callable
# Like tools.api.context.with_native_function, but for
# NativeFunctionWithDifferentiabilityInfo.
def with_native_function_with_differentiability_info(func: Callable[[NFWDI], T]) -> Callable[[NFWDI], T]:
@functools.wraps(func)
Reported by Pylint.
Line: 10
Column: 1
# Like tools.api.context.with_native_function, but for
# NativeFunctionWithDifferentiabilityInfo.
def with_native_function_with_differentiability_info(func: Callable[[NFWDI], T]) -> Callable[[NFWDI], T]:
@functools.wraps(func)
def wrapper(f: NFWDI) -> T:
with native_function_manager(f.func):
return func(f)
return wrapper
Reported by Pylint.
Line: 10
Column: 1
# Like tools.api.context.with_native_function, but for
# NativeFunctionWithDifferentiabilityInfo.
def with_native_function_with_differentiability_info(func: Callable[[NFWDI], T]) -> Callable[[NFWDI], T]:
@functools.wraps(func)
def wrapper(f: NFWDI) -> T:
with native_function_manager(f.func):
return func(f)
return wrapper
Reported by Pylint.
Line: 12
Column: 5
# NativeFunctionWithDifferentiabilityInfo.
def with_native_function_with_differentiability_info(func: Callable[[NFWDI], T]) -> Callable[[NFWDI], T]:
@functools.wraps(func)
def wrapper(f: NFWDI) -> T:
with native_function_manager(f.func):
return func(f)
return wrapper
Reported by Pylint.
tools/code_coverage/package/tool/parser/llvm_coverage_parser.py
6 issues
Line: 3
Column: 1
from typing import Any, Dict, List, Set, Tuple
from .coverage_record import CoverageRecord
from .llvm_coverage_segment import LlvmCoverageSegment, parse_segments
class LlvmCoverageParser:
"""
Accepts a parsed json produced by llvm-cov export -- typically,
Reported by Pylint.
Line: 4
Column: 1
from typing import Any, Dict, List, Set, Tuple
from .coverage_record import CoverageRecord
from .llvm_coverage_segment import LlvmCoverageSegment, parse_segments
class LlvmCoverageParser:
"""
Accepts a parsed json produced by llvm-cov export -- typically,
Reported by Pylint.
Line: 48
Column: 21
uncovered_lines.difference_update(covered_lines)
return sorted(covered_lines), sorted(uncovered_lines)
def parse(self, repo_name: str) -> List[CoverageRecord]:
# The JSON format is described in the LLVM source code
# https://github.com/llvm-mirror/llvm/blob/master/tools/llvm-cov/CoverageExporterJson.cpp
records: List[CoverageRecord] = []
for export_unit in self._llvm_coverage["data"]:
for file_info in export_unit["files"]:
Reported by Pylint.
Line: 1
Column: 1
from typing import Any, Dict, List, Set, Tuple
from .coverage_record import CoverageRecord
from .llvm_coverage_segment import LlvmCoverageSegment, parse_segments
class LlvmCoverageParser:
"""
Accepts a parsed json produced by llvm-cov export -- typically,
Reported by Pylint.
Line: 7
Column: 1
from .llvm_coverage_segment import LlvmCoverageSegment, parse_segments
class LlvmCoverageParser:
"""
Accepts a parsed json produced by llvm-cov export -- typically,
representing a single C++ test and produces a list
of CoverageRecord(s).
Reported by Pylint.
Line: 48
Column: 5
uncovered_lines.difference_update(covered_lines)
return sorted(covered_lines), sorted(uncovered_lines)
def parse(self, repo_name: str) -> List[CoverageRecord]:
# The JSON format is described in the LLVM source code
# https://github.com/llvm-mirror/llvm/blob/master/tools/llvm-cov/CoverageExporterJson.cpp
records: List[CoverageRecord] = []
for export_unit in self._llvm_coverage["data"]:
for file_info in export_unit["files"]:
Reported by Pylint.
test/package/package_c/__init__.py
6 issues
Line: 1
Column: 1
result = "package_c"
class PackageCObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 1
Column: 1
result = "package_c"
class PackageCObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 4
Column: 1
result = "package_c"
class PackageCObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 4
Column: 1
result = "package_c"
class PackageCObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 10
Column: 5
def __init__(self, obj):
self.obj = obj
def return_result(self):
return result
Reported by Pylint.
Line: 10
Column: 5
def __init__(self, obj):
self.obj = obj
def return_result(self):
return result
Reported by Pylint.
test/package/package_b/__init__.py
6 issues
Line: 1
Column: 1
__import__("subpackage_1", globals(), fromlist=["PackageBSubpackage1Object_0"], level=1)
__import__("subpackage_0.subsubpackage_0", globals(), fromlist=[""], level=1)
__import__("subpackage_2", globals=globals(), locals=locals(), fromlist=["*"], level=1)
result = "package_b"
class PackageBObject:
__slots__ = ["obj"]
Reported by Pylint.
Line: 5
Column: 1
__import__("subpackage_0.subsubpackage_0", globals(), fromlist=[""], level=1)
__import__("subpackage_2", globals=globals(), locals=locals(), fromlist=["*"], level=1)
result = "package_b"
class PackageBObject:
__slots__ = ["obj"]
Reported by Pylint.
Line: 8
Column: 1
result = "package_b"
class PackageBObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 8
Column: 1
result = "package_b"
class PackageBObject:
__slots__ = ["obj"]
def __init__(self, obj):
self.obj = obj
Reported by Pylint.
Line: 14
Column: 5
def __init__(self, obj):
self.obj = obj
def return_result(self):
return result
Reported by Pylint.
Line: 14
Column: 5
def __init__(self, obj):
self.obj = obj
def return_result(self):
return result
Reported by Pylint.