The following issues were found:
.circleci/cimodel/lib/miniyaml.py
9 issues
Line: 1
Column: 1
from collections import OrderedDict
import cimodel.lib.miniutils as miniutils
LIST_MARKER = "- "
INDENTATION_WIDTH = 2
Reported by Pylint.
Line: 10
Column: 1
INDENTATION_WIDTH = 2
def is_dict(data):
return type(data) in [dict, OrderedDict]
def is_collection(data):
return is_dict(data) or type(data) is list
Reported by Pylint.
Line: 14
Column: 1
return type(data) in [dict, OrderedDict]
def is_collection(data):
return is_dict(data) or type(data) is list
def render(fh, data, depth, is_list_member=False):
"""
Reported by Pylint.
Line: 15
Column: 29
def is_collection(data):
return is_dict(data) or type(data) is list
def render(fh, data, depth, is_list_member=False):
"""
PyYaml does not allow precise control over the quoting
Reported by Pylint.
Line: 18
Column: 1
return is_dict(data) or type(data) is list
def render(fh, data, depth, is_list_member=False):
"""
PyYaml does not allow precise control over the quoting
behavior, especially for merge references.
Therefore, we use this custom YAML renderer.
"""
Reported by Pylint.
Line: 30
Column: 12
if is_dict(data):
tuples = list(data.items())
if type(data) is not OrderedDict:
tuples.sort()
for i, (k, v) in enumerate(tuples):
if not v:
continue
Reported by Pylint.
Line: 33
Column: 20
if type(data) is not OrderedDict:
tuples.sort()
for i, (k, v) in enumerate(tuples):
if not v:
continue
# If this dict is itself a list member, the first key gets prefixed with a list marker
list_marker_prefix = LIST_MARKER if is_list_member and not i else ""
Reported by Pylint.
Line: 44
Column: 10
render(fh, v, depth + 1 + int(is_list_member))
elif type(data) is list:
for v in data:
render(fh, v, depth, True)
else:
# use empty quotes to denote an empty string value instead of blank space
Reported by Pylint.
Line: 45
Column: 13
render(fh, v, depth + 1 + int(is_list_member))
elif type(data) is list:
for v in data:
render(fh, v, depth, True)
else:
# use empty quotes to denote an empty string value instead of blank space
modified_data = miniutils.quote(data) if data == "" else data
Reported by Pylint.
.circleci/cimodel/data/simple/ge_config_tests.py
9 issues
Line: 1
Column: 1
import cimodel.lib.miniutils as miniutils
from cimodel.data.simple.util.versions import MultiPartVersion, CudaVersion
from cimodel.data.simple.util.docker_constants import DOCKER_IMAGE_BASIC, DOCKER_IMAGE_CUDA_10_2
class GeConfigTestJob:
def __init__(self,
py_version,
gcc_version,
Reported by Pylint.
Line: 6
Column: 1
from cimodel.data.simple.util.docker_constants import DOCKER_IMAGE_BASIC, DOCKER_IMAGE_CUDA_10_2
class GeConfigTestJob:
def __init__(self,
py_version,
gcc_version,
cuda_version,
variant_parts,
Reported by Pylint.
Line: 7
Column: 5
class GeConfigTestJob:
def __init__(self,
py_version,
gcc_version,
cuda_version,
variant_parts,
extra_requires,
Reported by Pylint.
Line: 24
Column: 5
self.use_cuda_docker = use_cuda_docker
self.build_env_override = build_env_override
def get_all_parts(self, with_dots):
maybe_py_version = self.py_version.render_dots_or_parts(with_dots) if self.py_version else []
maybe_gcc_version = self.gcc_version.render_dots_or_parts(with_dots) if self.gcc_version else []
maybe_cuda_version = self.cuda_version.render_dots_or_parts(with_dots) if self.cuda_version else []
Reported by Pylint.
Line: 26
Column: 1
def get_all_parts(self, with_dots):
maybe_py_version = self.py_version.render_dots_or_parts(with_dots) if self.py_version else []
maybe_gcc_version = self.gcc_version.render_dots_or_parts(with_dots) if self.gcc_version else []
maybe_cuda_version = self.cuda_version.render_dots_or_parts(with_dots) if self.cuda_version else []
common_parts = [
"pytorch",
Reported by Pylint.
Line: 27
Column: 1
def get_all_parts(self, with_dots):
maybe_py_version = self.py_version.render_dots_or_parts(with_dots) if self.py_version else []
maybe_gcc_version = self.gcc_version.render_dots_or_parts(with_dots) if self.gcc_version else []
maybe_cuda_version = self.cuda_version.render_dots_or_parts(with_dots) if self.cuda_version else []
common_parts = [
"pytorch",
"linux",
Reported by Pylint.
Line: 28
Column: 1
maybe_py_version = self.py_version.render_dots_or_parts(with_dots) if self.py_version else []
maybe_gcc_version = self.gcc_version.render_dots_or_parts(with_dots) if self.gcc_version else []
maybe_cuda_version = self.cuda_version.render_dots_or_parts(with_dots) if self.cuda_version else []
common_parts = [
"pytorch",
"linux",
"xenial",
Reported by Pylint.
Line: 38
Column: 5
return common_parts + self.variant_parts
def gen_tree(self):
resource_class = "gpu.medium" if self.use_cuda_docker else "large"
docker_image = DOCKER_IMAGE_CUDA_10_2 if self.use_cuda_docker else DOCKER_IMAGE_BASIC
full_name = "_".join(self.get_all_parts(False))
build_env = self.build_env_override or "-".join(self.get_all_parts(True))
Reported by Pylint.
Line: 77
Column: 1
]
def get_workflow_jobs():
return [item.gen_tree() for item in WORKFLOW_DATA]
Reported by Pylint.
caffe2/python/layers/add_bias.py
9 issues
Line: 1
Column: 1
## @package add_bias
# Module caffe2.python.layers.add_bias
from caffe2.python import schema
from caffe2.python.layers.layers import ModelLayer
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python import schema
from caffe2.python.layers.layers import ModelLayer
import math
class AddBias(ModelLayer):
def __init__(self, model, input_record, bias_init=None,
Reported by Pylint.
Line: 13
Column: 1
import math
class AddBias(ModelLayer):
def __init__(self, model, input_record, bias_init=None,
bias_optim=None, name='add_bias'):
super(AddBias, self).__init__(model, name, input_record)
assert isinstance(input_record, schema.Scalar), "Incorrect input type"
Reported by Pylint.
Line: 15
Column: 5
class AddBias(ModelLayer):
def __init__(self, model, input_record, bias_init=None,
bias_optim=None, name='add_bias'):
super(AddBias, self).__init__(model, name, input_record)
assert isinstance(input_record, schema.Scalar), "Incorrect input type"
assert len(input_record.field_type().shape) > 0, (
"AddBias expects limited dimensions of the input tensor")
Reported by Pylint.
Line: 17
Column: 9
def __init__(self, model, input_record, bias_init=None,
bias_optim=None, name='add_bias'):
super(AddBias, self).__init__(model, name, input_record)
assert isinstance(input_record, schema.Scalar), "Incorrect input type"
assert len(input_record.field_type().shape) > 0, (
"AddBias expects limited dimensions of the input tensor")
input_dims = input_record.field_type().shape[0]
Reported by Pylint.
Line: 18
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def __init__(self, model, input_record, bias_init=None,
bias_optim=None, name='add_bias'):
super(AddBias, self).__init__(model, name, input_record)
assert isinstance(input_record, schema.Scalar), "Incorrect input type"
assert len(input_record.field_type().shape) > 0, (
"AddBias expects limited dimensions of the input tensor")
input_dims = input_record.field_type().shape[0]
assert input_dims > 0, (
Reported by Bandit.
Line: 19
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
bias_optim=None, name='add_bias'):
super(AddBias, self).__init__(model, name, input_record)
assert isinstance(input_record, schema.Scalar), "Incorrect input type"
assert len(input_record.field_type().shape) > 0, (
"AddBias expects limited dimensions of the input tensor")
input_dims = input_record.field_type().shape[0]
assert input_dims > 0, (
"AddBias expects input dimensions > 0, got {}".format(input_dims))
Reported by Bandit.
Line: 23
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"AddBias expects limited dimensions of the input tensor")
input_dims = input_record.field_type().shape[0]
assert input_dims > 0, (
"AddBias expects input dimensions > 0, got {}".format(input_dims))
scale = math.sqrt(1.0 / input_dims)
bias_init = bias_init if bias_init else (
'UniformFill', {'min': -scale, 'max': scale})
Reported by Bandit.
Line: 30
Column: 9
bias_init = bias_init if bias_init else (
'UniformFill', {'min': -scale, 'max': scale})
self.b = self.create_param(
param_name='b',
shape=[input_dims, ],
initializer=bias_init,
optimizer=bias_optim,
)
Reported by Pylint.
caffe2/python/gru_cell.py
9 issues
Line: 10
Column: 1
from caffe2.python import brew, rnn_cell
class GRUCell(rnn_cell.RNNCell):
def __init__(
self,
input_size,
hidden_size,
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python import brew, rnn_cell
class GRUCell(rnn_cell.RNNCell):
def __init__(
self,
input_size,
hidden_size,
Reported by Pylint.
Line: 172
Column: 25
return self.hidden_size
GRU = functools.partial(rnn_cell._LSTM, GRUCell)
Reported by Pylint.
Line: 1
Column: 1
import functools
from caffe2.python import brew, rnn_cell
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python import brew, rnn_cell
class GRUCell(rnn_cell.RNNCell):
def __init__(
self,
input_size,
hidden_size,
Reported by Pylint.
Line: 12
Column: 5
class GRUCell(rnn_cell.RNNCell):
def __init__(
self,
input_size,
hidden_size,
forget_bias, # Currently unused! Values here will be ignored.
memory_optimization,
Reported by Pylint.
Line: 22
Column: 9
linear_before_reset=False,
**kwargs
):
super(GRUCell, self).__init__(**kwargs)
self.input_size = input_size
self.hidden_size = hidden_size
self.forget_bias = float(forget_bias)
self.memory_optimization = memory_optimization
self.drop_states = drop_states
Reported by Pylint.
Line: 35
Column: 5
# So, much of the logic to calculate the reset gate output and modified
# output gate input is set here, in the graph definition.
# The remaining logic lives in gru_unit_op.{h,cc}.
def _apply(
self,
model,
input_t,
seq_lengths,
states,
Reported by Pylint.
Line: 35
Column: 5
# So, much of the logic to calculate the reset gate output and modified
# output gate input is set here, in the graph definition.
# The remaining logic lives in gru_unit_op.{h,cc}.
def _apply(
self,
model,
input_t,
seq_lengths,
states,
Reported by Pylint.
caffe2/python/helpers/arg_scope.py
9 issues
Line: 13
Column: 5
@contextlib.contextmanager
def arg_scope(single_helper_or_list, **kwargs):
global _threadlocal_scope
if not isinstance(single_helper_or_list, list):
assert callable(single_helper_or_list), \
"arg_scope is only supporting single or a list of helper functions."
single_helper_or_list = [single_helper_or_list]
old_scope = copy.deepcopy(get_current_scope())
Reported by Pylint.
Line: 32
Column: 5
def get_current_scope():
global _threadlocal_scope
if not hasattr(_threadlocal_scope, "current_scope"):
_threadlocal_scope.current_scope = {}
return _threadlocal_scope.current_scope
Reported by Pylint.
Line: 1
Column: 1
import contextlib
import copy
import threading
_threadlocal_scope = threading.local()
Reported by Pylint.
Line: 12
Column: 1
@contextlib.contextmanager
def arg_scope(single_helper_or_list, **kwargs):
global _threadlocal_scope
if not isinstance(single_helper_or_list, list):
assert callable(single_helper_or_list), \
"arg_scope is only supporting single or a list of helper functions."
single_helper_or_list = [single_helper_or_list]
Reported by Pylint.
Line: 13
Column: 5
@contextlib.contextmanager
def arg_scope(single_helper_or_list, **kwargs):
global _threadlocal_scope
if not isinstance(single_helper_or_list, list):
assert callable(single_helper_or_list), \
"arg_scope is only supporting single or a list of helper functions."
single_helper_or_list = [single_helper_or_list]
old_scope = copy.deepcopy(get_current_scope())
Reported by Pylint.
Line: 15
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def arg_scope(single_helper_or_list, **kwargs):
global _threadlocal_scope
if not isinstance(single_helper_or_list, list):
assert callable(single_helper_or_list), \
"arg_scope is only supporting single or a list of helper functions."
single_helper_or_list = [single_helper_or_list]
old_scope = copy.deepcopy(get_current_scope())
for helper in single_helper_or_list:
assert callable(helper), \
Reported by Bandit.
Line: 20
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
single_helper_or_list = [single_helper_or_list]
old_scope = copy.deepcopy(get_current_scope())
for helper in single_helper_or_list:
assert callable(helper), \
"arg_scope is only supporting a list of callable helper functions."
helper_key = helper.__name__
if helper_key not in old_scope:
_threadlocal_scope.current_scope[helper_key] = {}
_threadlocal_scope.current_scope[helper_key].update(kwargs)
Reported by Bandit.
Line: 31
Column: 1
_threadlocal_scope.current_scope = old_scope
def get_current_scope():
global _threadlocal_scope
if not hasattr(_threadlocal_scope, "current_scope"):
_threadlocal_scope.current_scope = {}
return _threadlocal_scope.current_scope
Reported by Pylint.
Line: 32
Column: 5
def get_current_scope():
global _threadlocal_scope
if not hasattr(_threadlocal_scope, "current_scope"):
_threadlocal_scope.current_scope = {}
return _threadlocal_scope.current_scope
Reported by Pylint.
caffe2/python/cached_reader.py
9 issues
Line: 16
Column: 1
from caffe2.python.task import Cluster, TaskGroup
class CachedReader(DBFileReader):
default_name_suffix = 'cached_reader'
"""Reader with persistent in-file cache.
Reported by Pylint.
Line: 82
Column: 1
loop_over,
)
def _init_reader_schema(self, *args, **kwargs):
"""Prepare the reader schema.
Since an original reader is given,
use its schema as ground truth.
Reported by Pylint.
Line: 82
Column: 1
loop_over,
)
def _init_reader_schema(self, *args, **kwargs):
"""Prepare the reader schema.
Since an original reader is given,
use its schema as ground truth.
Reported by Pylint.
Line: 91
Column: 16
Returns:
schema: schema.Struct. Used in Reader.__init__(...).
"""
return self.original_reader._schema
def build_cache_step(self, overwrite=False):
"""Build a step for generating cache DB file.
If self.db_path exists and not overwriting, build an empty step.
Reported by Pylint.
Line: 1
Column: 1
## @package cached_reader
# Module caffe2.python.cached_reader
import os
Reported by Pylint.
Line: 16
Column: 1
from caffe2.python.task import Cluster, TaskGroup
class CachedReader(DBFileReader):
default_name_suffix = 'cached_reader'
"""Reader with persistent in-file cache.
Reported by Pylint.
Line: 62
Column: 5
If True given, will go through examples in random order endlessly.
Defaults to False.
"""
def __init__(
self,
original_reader,
db_path,
db_type='LevelDB',
name=None,
Reported by Pylint.
Line: 71
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
batch_size=100,
loop_over=False,
):
assert original_reader is not None, "original_reader can't be None"
self.original_reader = original_reader
super(CachedReader, self).__init__(
db_path,
db_type,
Reported by Bandit.
Line: 74
Column: 9
assert original_reader is not None, "original_reader can't be None"
self.original_reader = original_reader
super(CachedReader, self).__init__(
db_path,
db_type,
name,
batch_size,
loop_over,
Reported by Pylint.
caffe2/contrib/playground/resnetdemo/caffe2_resnet50_default_param_update.py
9 issues
Line: 7
Column: 47
def gen_param_update_builder_fun(self, model, dataset, is_train):
if not is_train:
return None
else:
def add_parameter_update_ops(model):
model.AddWeightDecay(1e-4)
Reported by Pylint.
Line: 7
Column: 40
def gen_param_update_builder_fun(self, model, dataset, is_train):
if not is_train:
return None
else:
def add_parameter_update_ops(model):
model.AddWeightDecay(1e-4)
Reported by Pylint.
Line: 1
Column: 1
def gen_param_update_builder_fun(self, model, dataset, is_train):
if not is_train:
return None
Reported by Pylint.
Line: 7
Column: 1
def gen_param_update_builder_fun(self, model, dataset, is_train):
if not is_train:
return None
else:
def add_parameter_update_ops(model):
model.AddWeightDecay(1e-4)
Reported by Pylint.
Line: 8
Column: 5
def gen_param_update_builder_fun(self, model, dataset, is_train):
if not is_train:
return None
else:
def add_parameter_update_ops(model):
model.AddWeightDecay(1e-4)
ITER = model.Iter("ITER")
Reported by Pylint.
Line: 13
Column: 13
else:
def add_parameter_update_ops(model):
model.AddWeightDecay(1e-4)
ITER = model.Iter("ITER")
stepsz = int(30 *
self.opts['epoch_iter']['num_train_sample_per_epoch'] /
self.total_batch_size)
LR = model.net.LearningRate(
[ITER],
Reported by Pylint.
Line: 17
Column: 13
stepsz = int(30 *
self.opts['epoch_iter']['num_train_sample_per_epoch'] /
self.total_batch_size)
LR = model.net.LearningRate(
[ITER],
"lr",
base_lr=self.opts['model_param']['base_learning_rate'],
policy="step",
stepsize=stepsz,
Reported by Pylint.
Line: 27
Column: 1
)
params = model.GetParams()
assert(len(params) > 0)
for param in params:
param_grad = model.param_to_grad[param]
param_momentum = model.param_init_net.ConstantFill(
[param], param + '_momentum', value=0.0
)
Reported by Pylint.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
)
params = model.GetParams()
assert(len(params) > 0)
for param in params:
param_grad = model.param_to_grad[param]
param_momentum = model.param_init_net.ConstantFill(
[param], param + '_momentum', value=0.0
)
Reported by Bandit.
caffe2/python/modeling/initializers.py
9 issues
Line: 52
Column: 3
param = ScopedBlobReference(param_name, init_net)
else:
raise TypeError("Unsupported type for param_name")
# TODO(amalevich): Add operator that will check param in the workspace
return ParameterInfo(
param_id=None,
param=param,
shape=shape,
)
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python.core import DataType, BlobReference, ScopedBlobReference
from caffe2.python.modeling.parameter_info import ParameterInfo
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python.modeling.parameter_info import ParameterInfo
class Initializer(object):
'''
This class abstracts out parameter creation. One can come up with a new
Initializer in order to implement more complex parameter initialization logic
'''
Reported by Pylint.
Line: 20
Column: 5
self.operator_name = operator_name
self.operator_kwargs = kwargs
def update(self, operator_name, kwargs):
if self.operator_name is not None:
raise Exception("Operator name overwrites are not allowed")
self.operator_name = operator_name
self.operator_kwargs = kwargs
Reported by Pylint.
Line: 26
Column: 5
self.operator_name = operator_name
self.operator_kwargs = kwargs
def create_param(self, param_name, init_net, shape):
param = init_net.__getattr__(self.operator_name)(
[], param_name, shape=shape, **self.operator_kwargs)
return ParameterInfo(
param_id=None,
param=param,
Reported by Pylint.
Line: 36
Column: 1
)
class ExternalInitializer(object):
'''
This class is used in cases when the parameter should not be initialized by
the initializer, but rather provided in the workspace when param_init_net is
executed.
Reported by Pylint.
Line: 36
Column: 1
)
class ExternalInitializer(object):
'''
This class is used in cases when the parameter should not be initialized by
the initializer, but rather provided in the workspace when param_init_net is
executed.
Reported by Pylint.
Line: 45
Column: 5
Current version is not doing any real sanity checks to the parameter.
'''
def create_param(self, param_name, init_net, shape):
if isinstance(param_name, BlobReference):
param = BlobReference(str(param_name), init_net)
elif isinstance(param_name, str):
param = ScopedBlobReference(param_name, init_net)
else:
Reported by Pylint.
Line: 45
Column: 5
Current version is not doing any real sanity checks to the parameter.
'''
def create_param(self, param_name, init_net, shape):
if isinstance(param_name, BlobReference):
param = BlobReference(str(param_name), init_net)
elif isinstance(param_name, str):
param = ScopedBlobReference(param_name, init_net)
else:
Reported by Pylint.
caffe2/contrib/playground/meter.py
9 issues
Line: 1
Column: 1
from abc import abstractmethod
class Meter(object):
Reported by Pylint.
Line: 9
Column: 1
from abc import abstractmethod
class Meter(object):
@abstractmethod
def __init__(self, **kwargs):
pass
Reported by Pylint.
Line: 9
Column: 1
from abc import abstractmethod
class Meter(object):
@abstractmethod
def __init__(self, **kwargs):
pass
Reported by Pylint.
Line: 16
Column: 5
pass
@abstractmethod
def Reset(self):
pass
@abstractmethod
def Add(self):
pass
Reported by Pylint.
Line: 16
Column: 5
pass
@abstractmethod
def Reset(self):
pass
@abstractmethod
def Add(self):
pass
Reported by Pylint.
Line: 20
Column: 5
pass
@abstractmethod
def Add(self):
pass
@abstractmethod
def Compute(self):
pass
Reported by Pylint.
Line: 20
Column: 5
pass
@abstractmethod
def Add(self):
pass
@abstractmethod
def Compute(self):
pass
Reported by Pylint.
Line: 24
Column: 5
pass
@abstractmethod
def Compute(self):
pass
Reported by Pylint.
Line: 24
Column: 5
pass
@abstractmethod
def Compute(self):
pass
Reported by Pylint.
caffe2/python/layers/split.py
9 issues
Line: 1
Column: 1
## @package split
# Module caffe2.python.layers.split
from caffe2.python import schema
from caffe2.python.layers.layers import (
Reported by Pylint.
Line: 14
Column: 1
)
class Split(ModelLayer):
def __init__(self, model, input_record, num_splits=1, axis=1,
name='split', split=None, **kwargs):
super(Split, self).__init__(model, name, input_record, **kwargs)
self.axis = axis
Reported by Pylint.
Line: 16
Column: 5
class Split(ModelLayer):
def __init__(self, model, input_record, num_splits=1, axis=1,
name='split', split=None, **kwargs):
super(Split, self).__init__(model, name, input_record, **kwargs)
self.axis = axis
# Assume that first dimension is batch, so actual axis in shape is
# axis - 1
Reported by Pylint.
Line: 18
Column: 9
def __init__(self, model, input_record, num_splits=1, axis=1,
name='split', split=None, **kwargs):
super(Split, self).__init__(model, name, input_record, **kwargs)
self.axis = axis
# Assume that first dimension is batch, so actual axis in shape is
# axis - 1
axis -= 1
assert axis >= 0
Reported by Pylint.
Line: 23
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# Assume that first dimension is batch, so actual axis in shape is
# axis - 1
axis -= 1
assert axis >= 0
assert isinstance(input_record, schema.Scalar),\
"Incorrect input type. Expected Scalar, but received: {0}".\
format(input_record)
Reported by Bandit.
Line: 25
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
axis -= 1
assert axis >= 0
assert isinstance(input_record, schema.Scalar),\
"Incorrect input type. Expected Scalar, but received: {0}".\
format(input_record)
input_shape = input_record.field_type().shape
assert len(input_shape) >= axis
Reported by Bandit.
Line: 30
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
format(input_record)
input_shape = input_record.field_type().shape
assert len(input_shape) >= axis
if split is None:
assert input_shape[axis] % num_splits == 0
else:
num_splits = len(split)
assert input_shape[axis] == sum(split)
Reported by Bandit.
Line: 32
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
input_shape = input_record.field_type().shape
assert len(input_shape) >= axis
if split is None:
assert input_shape[axis] % num_splits == 0
else:
num_splits = len(split)
assert input_shape[axis] == sum(split)
if split is None:
Reported by Bandit.
Line: 35
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert input_shape[axis] % num_splits == 0
else:
num_splits = len(split)
assert input_shape[axis] == sum(split)
if split is None:
output_shape = list(input_shape)
output_shape[axis] = int(output_shape[axis] / num_splits)
else:
Reported by Bandit.