The following issues were found by static analysis (FlawFinder, Pylint, Bandit, Cppcheck):
c10/util/AlignOf.h
7 issues
Line: 38
Column: 22
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
template <size_t Alignment, size_t Size>
struct AlignedCharArray {
alignas(Alignment) char buffer[Size];
};
#else // _MSC_VER
/// \brief Create a type with an aligned char buffer.
Reported by FlawFinder.
Line: 59
Column: 5
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
struct AlignedCharArray<1, Size> {
union {
char aligned;
char buffer[Size];
};
};
template <size_t Size>
struct AlignedCharArray<2, Size> {
Reported by FlawFinder.
Line: 67
Column: 5
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
struct AlignedCharArray<2, Size> {
union {
short aligned;
char buffer[Size];
};
};
template <size_t Size>
struct AlignedCharArray<4, Size> {
Reported by FlawFinder.
Line: 75
Column: 5
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
struct AlignedCharArray<4, Size> {
union {
int aligned;
char buffer[Size];
};
};
template <size_t Size>
struct AlignedCharArray<8, Size> {
Reported by FlawFinder.
Line: 83
Column: 5
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
struct AlignedCharArray<8, Size> {
union {
double aligned;
char buffer[Size];
};
};
// The rest of these are provided with a __declspec(align(...)) and we simply
// can't pass them by-value as function arguments on MSVC.
Reported by FlawFinder.
Line: 93
Column: 26
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
#define AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(x) \
template <size_t Size> \
struct AlignedCharArray<x, Size> { \
__declspec(align(x)) char buffer[Size]; \
};
AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(16)
AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(32)
AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(64)
Reported by FlawFinder.
Line: 144
Column: 3
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
typename T9 = char,
typename T10 = char>
union SizerImpl {
char arr1[sizeof(T1)], arr2[sizeof(T2)], arr3[sizeof(T3)], arr4[sizeof(T4)],
arr5[sizeof(T5)], arr6[sizeof(T6)], arr7[sizeof(T7)], arr8[sizeof(T8)],
arr9[sizeof(T9)], arr10[sizeof(T10)];
};
} // end namespace detail
Reported by FlawFinder.
caffe2/python/layers/select_record_by_context.py
7 issues
Line: 1
Column: 1
import logging
from caffe2.python import schema
from caffe2.python.layers.layers import (
Reported by Pylint.
Line: 25
Column: 5
sometimes clone fields internally so we need static blob name for output
"""
def __init__(
self,
model,
input_record,
name='select_record_by_context',
check_field_metas=True,
Reported by Pylint.
Line: 35
Column: 9
default_output_record_field=None,
**kwargs
):
super(SelectRecordByContext, self).__init__(model, name, input_record,
**kwargs)
assert isinstance(input_record, schema.Struct)
assert len(input_record) > 1
Reported by Pylint.
Line: 38
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
super(SelectRecordByContext, self).__init__(model, name, input_record,
**kwargs)
assert isinstance(input_record, schema.Struct)
assert len(input_record) > 1
self.use_copy = use_copy
self.default_output_record = (
input_record[default_output_record_field]
Reported by Bandit.
Line: 39
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
**kwargs)
assert isinstance(input_record, schema.Struct)
assert len(input_record) > 1
self.use_copy = use_copy
self.default_output_record = (
input_record[default_output_record_field]
if (default_output_record_field is not None) else None
Reported by Bandit.
Line: 48
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
)
ref_record = input_record[0]
for record in input_record:
assert schema.equal_schemas(record, ref_record,
check_field_metas=check_field_metas)
self.output_schema = schema.NewRecord(model.net, ref_record)
def _set_output_blobs(self, net, context):
Reported by Bandit.
Line: 55
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def _set_output_blobs(self, net, context):
record = self.input_record.get(context, self.default_output_record)
assert record is not None, (
"{} context is not in input record without providing default"
" output".format(context)
)
for in_blob, out_blob in zip(
record.field_blobs(), self.output_schema.field_blobs()
Reported by Bandit.
caffe2/python/layers/semi_random_features.py
7 issues
Line: 140
Column: 13
processed_random_features = self._heaviside_with_power(
net,
random_features,
self.output_schema.random.field_blobs(),
self.s
)
net.Mul([processed_random_features, learned_features],
self.output_schema.full.field_blobs())
Reported by Pylint.
Line: 144
Column: 17
self.s
)
net.Mul([processed_random_features, learned_features],
self.output_schema.full.field_blobs())
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import schema
from caffe2.python.layers.arc_cosine_feature_map import ArcCosineFeatureMap
import numpy as np
Reported by Pylint.
Line: 54
Column: 5
will be constant across all distributed
instances of the layer
"""
def __init__(
self,
model,
input_record,
output_dims,
s=1,
Reported by Pylint.
Line: 54
Column: 5
will be constant across all distributed
instances of the layer
"""
def __init__(
self,
model,
input_record,
output_dims,
s=1,
Reported by Pylint.
Line: 87
Column: 9
self.input_record_full = input_record
self.input_record_random = input_record
super(SemiRandomFeatures, self).__init__(
model,
self.input_record_full,
output_dims,
s=s,
scale=scale_random, # To initialize the random parameters
Reported by Pylint.
Line: 114
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
)
# To initialize the learnable parameters
assert (scale_learned > 0.0), \
"Expected scale (learned) > 0, got %s" % scale_learned
self.stddev = scale_learned * np.sqrt(1.0 / self.input_dims)
# Learned Parameters
(self.learned_w, self.learned_b) = self._initialize_params(
Reported by Bandit.
aten/src/ATen/native/quantized/cpu/qnnpack/src/qnnpack/params.h
7 issues
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
Line: 56
} neonv8;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
PYTORCH_QNNP_ALIGN(16) int16_t zero_point[8];
PYTORCH_QNNP_ALIGN(16) uint8_t max[16];
PYTORCH_QNNP_ALIGN(16) uint8_t min[16];
} sse2;
struct {
PYTORCH_QNNP_ALIGN(16) float* scales;
Reported by Cppcheck.
caffe2/python/modeling/compute_statistics_for_blobs.py
7 issues
Line: 23
Column: 5
logging_frequency: frequency for printing norms to logs
"""
def __init__(self, blobs, logging_frequency):
self._blobs = blobs
self._logging_frequency = logging_frequency
self._field_name_suffix = '_summary'
def modify_net(self, net, init_net=None, grad_map=None, blob_to_device=None,
Reported by Pylint.
Line: 28
Column: 5
self._logging_frequency = logging_frequency
self._field_name_suffix = '_summary'
def modify_net(self, net, init_net=None, grad_map=None, blob_to_device=None,
modify_output_record=False):
for blob_name in self._blobs:
blob = core.BlobReference(blob_name)
assert net.BlobIsDefined(blob), 'blob {} is not defined in net {} whose proto is {}'.format(blob, net.Name(), net.Proto())
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core, schema
from caffe2.python.modeling.net_modifier import NetModifier
import numpy as np
Reported by Pylint.
Line: 28
Column: 5
self._logging_frequency = logging_frequency
self._field_name_suffix = '_summary'
def modify_net(self, net, init_net=None, grad_map=None, blob_to_device=None,
modify_output_record=False):
for blob_name in self._blobs:
blob = core.BlobReference(blob_name)
assert net.BlobIsDefined(blob), 'blob {} is not defined in net {} whose proto is {}'.format(blob, net.Name(), net.Proto())
Reported by Pylint.
Line: 33
Column: 1
for blob_name in self._blobs:
blob = core.BlobReference(blob_name)
assert net.BlobIsDefined(blob), 'blob {} is not defined in net {} whose proto is {}'.format(blob, net.Name(), net.Proto())
cast_blob = net.Cast(blob, to=core.DataType.FLOAT)
stats_name = net.NextScopedBlob(prefix=blob + self._field_name_suffix)
stats = net.Summarize(cast_blob, stats_name, to_file=0)
net.Print(stats, [], every_n=self._logging_frequency)
Reported by Pylint.
Line: 33
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
for blob_name in self._blobs:
blob = core.BlobReference(blob_name)
assert net.BlobIsDefined(blob), 'blob {} is not defined in net {} whose proto is {}'.format(blob, net.Name(), net.Proto())
cast_blob = net.Cast(blob, to=core.DataType.FLOAT)
stats_name = net.NextScopedBlob(prefix=blob + self._field_name_suffix)
stats = net.Summarize(cast_blob, stats_name, to_file=0)
net.Print(stats, [], every_n=self._logging_frequency)
Reported by Bandit.
Line: 53
Column: 5
output_field_name,
output_scalar)
def field_name_suffix(self):
return self._field_name_suffix
Reported by Pylint.
caffe2/python/modeling/compute_statistics_for_blobs_test.py
7 issues
Line: 1
Column: 1
import unittest
from caffe2.python import workspace, brew, model_helper
from caffe2.python.modeling.compute_statistics_for_blobs import (
ComputeStatisticsForBlobs
Reported by Pylint.
Line: 15
Column: 1
import numpy as np
class ComputeStatisticsForBlobsTest(unittest.TestCase):
def test_compute_statistics_for_blobs(self):
model = model_helper.ModelHelper(name="test")
data = model.net.AddExternalInput("data")
fc1 = brew.fc(model, data, "fc1", dim_in=4, dim_out=2)
Reported by Pylint.
Line: 16
Column: 5
class ComputeStatisticsForBlobsTest(unittest.TestCase):
def test_compute_statistics_for_blobs(self):
model = model_helper.ModelHelper(name="test")
data = model.net.AddExternalInput("data")
fc1 = brew.fc(model, data, "fc1", dim_in=4, dim_out=2)
# no operator name set, will use default
Reported by Pylint.
Line: 47
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
delta=1e-5)
self.assertEqual(fc1_w_summary.size, 4)
assert model.net.output_record() is None
def test_compute_statistics_for_blobs_modify_output_record(self):
model = model_helper.ModelHelper(name="test")
data = model.net.AddExternalInput("data")
fc1 = brew.fc(model, data, "fc1", dim_in=4, dim_out=2)
Reported by Bandit.
Line: 49
Column: 5
assert model.net.output_record() is None
def test_compute_statistics_for_blobs_modify_output_record(self):
model = model_helper.ModelHelper(name="test")
data = model.net.AddExternalInput("data")
fc1 = brew.fc(model, data, "fc1", dim_in=4, dim_out=2)
# no operator name set, will use default
Reported by Pylint.
Line: 81
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
self.assertEqual(fc1_w_summary.size, 4)
self.assertEqual(len(model.net.Proto().op), 8)
assert 'fc1_w' + net_modifier.field_name_suffix() in\
model.net.output_record().field_blobs()
assert 'fc2_w' + net_modifier.field_name_suffix() in\
model.net.output_record().field_blobs()
Reported by Bandit.
Line: 83
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
self.assertEqual(len(model.net.Proto().op), 8)
assert 'fc1_w' + net_modifier.field_name_suffix() in\
model.net.output_record().field_blobs()
assert 'fc2_w' + net_modifier.field_name_suffix() in\
model.net.output_record().field_blobs()
Reported by Bandit.
caffe2/contrib/playground/compute_topk_accuracy.py
7 issues
Line: 18
Column: 5
# you will and have mutated that object for
# all future calls to the function as well.
# def __init__(self, blob_name=['softmax', 'label'], opts=None, topk=1):
def __init__(self, blob_name=None, opts=None, topk=1):
if blob_name is None:
blob_name = ['softmax', 'label']
self.blob_name = blob_name
self.opts = opts
self.topk = topk
Reported by Pylint.
Line: 1
Column: 1
import caffe2.contrib.playground.meter as Meter
from caffe2.python import workspace
import numpy as np
Reported by Pylint.
Line: 11
Column: 1
import numpy as np
class ComputeTopKAccuracy(Meter.Meter):
# Python default arguments are evaluated once when the function is
# defined, not each time the function is called
# This means that if you use a mutable default argument and mutate it,
# you will and have mutated that object for
# all future calls to the function as well.
Reported by Pylint.
Line: 43
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
if len(output.shape) == 1:
output = output.reshape((1, output.shape[0]))
else:
assert len(output.shape) == 2, \
'wrong output size (1D or 2D expected)'
assert len(target.shape) == 1, 'wrong target size (1D expected)'
assert output.shape[0] == target.shape[0], \
'target and output do not match'
Reported by Bandit.
Line: 45
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
else:
assert len(output.shape) == 2, \
'wrong output size (1D or 2D expected)'
assert len(target.shape) == 1, 'wrong target size (1D expected)'
assert output.shape[0] == target.shape[0], \
'target and output do not match'
N = output.shape[0]
pred = np.argsort(-output, axis=1)[:, :self.topk]
Reported by Bandit.
Line: 46
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert len(output.shape) == 2, \
'wrong output size (1D or 2D expected)'
assert len(target.shape) == 1, 'wrong target size (1D expected)'
assert output.shape[0] == target.shape[0], \
'target and output do not match'
N = output.shape[0]
pred = np.argsort(-output, axis=1)[:, :self.topk]
correct = pred.astype(target.dtype) == np.repeat(
Reported by Bandit.
Line: 49
Column: 13
assert output.shape[0] == target.shape[0], \
'target and output do not match'
N = output.shape[0]
pred = np.argsort(-output, axis=1)[:, :self.topk]
correct = pred.astype(target.dtype) == np.repeat(
target.reshape((N, 1)), [self.topk], axis=1)
self.value += np.sum(correct[:, :self.topk])
self.iter += N
Reported by Pylint.
caffe2/contrib/playground/resnetdemo/override_no_test_model_no_checkpoint.py
7 issues
Line: 6
Column: 22
def checkpoint(self, epoch):
self.model_path = None
pass
def prep_data_parallel_models(self):
# only do train_model no test needed here
Reported by Pylint.
Line: 8
Column: 5
def checkpoint(self, epoch):
self.model_path = None
pass
def prep_data_parallel_models(self):
# only do train_model no test needed here
self.prep_a_data_parallel_model(self.train_model,
self.train_dataset, True)
Reported by Pylint.
Line: 15
Column: 21
self.prep_a_data_parallel_model(self.train_model,
self.train_dataset, True)
def run_testing_net(self):
pass
Reported by Pylint.
Line: 1
Column: 1
def checkpoint(self, epoch):
self.model_path = None
pass
Reported by Pylint.
Line: 6
Column: 1
def checkpoint(self, epoch):
self.model_path = None
pass
def prep_data_parallel_models(self):
# only do train_model no test needed here
Reported by Pylint.
Line: 10
Column: 1
self.model_path = None
pass
def prep_data_parallel_models(self):
# only do train_model no test needed here
self.prep_a_data_parallel_model(self.train_model,
self.train_dataset, True)
def run_testing_net(self):
Reported by Pylint.
Line: 15
Column: 1
self.prep_a_data_parallel_model(self.train_model,
self.train_dataset, True)
def run_testing_net(self):
pass
Reported by Pylint.
.circleci/cimodel/data/simple/util/versions.py
7 issues
Line: 1
Column: 1
class MultiPartVersion:
def __init__(self, parts, prefix=""):
self.parts = parts
self.prefix = prefix
def prefixed_parts(self):
"""
Prepends the first element of the version list
with the prefix string.
Reported by Pylint.
Line: 1
Column: 1
class MultiPartVersion:
def __init__(self, parts, prefix=""):
self.parts = parts
self.prefix = prefix
def prefixed_parts(self):
"""
Prepends the first element of the version list
with the prefix string.
Reported by Pylint.
Line: 11
Column: 9
Prepends the first element of the version list
with the prefix string.
"""
if self.parts:
return [self.prefix + str(self.parts[0])] + [str(part) for part in self.parts[1:]]
else:
return [self.prefix]
def render_dots(self):
Reported by Pylint.
Line: 16
Column: 5
else:
return [self.prefix]
def render_dots(self):
return ".".join(self.prefixed_parts())
def render_dots_or_parts(self, with_dots):
if with_dots:
return [self.render_dots()]
Reported by Pylint.
Line: 19
Column: 5
def render_dots(self):
return ".".join(self.prefixed_parts())
def render_dots_or_parts(self, with_dots):
if with_dots:
return [self.render_dots()]
else:
return self.prefixed_parts()
Reported by Pylint.
Line: 20
Column: 9
return ".".join(self.prefixed_parts())
def render_dots_or_parts(self, with_dots):
if with_dots:
return [self.render_dots()]
else:
return self.prefixed_parts()
Reported by Pylint.
Line: 26
Column: 1
return self.prefixed_parts()
class CudaVersion(MultiPartVersion):
def __init__(self, major, minor):
self.major = major
self.minor = minor
super().__init__([self.major, self.minor], "cuda")
Reported by Pylint.
.circleci/cimodel/data/simple/nightly_ios.py
7 issues
Line: 1
Column: 1
import cimodel.data.simple.ios_definitions as ios_definitions
import cimodel.lib.miniutils as miniutils
class IOSNightlyJob:
def __init__(self,
variant,
is_upload=False):
Reported by Pylint.
Line: 5
Column: 1
import cimodel.lib.miniutils as miniutils
class IOSNightlyJob:
def __init__(self,
variant,
is_upload=False):
self.variant = variant
Reported by Pylint.
Line: 13
Column: 5
self.variant = variant
self.is_upload = is_upload
def get_phase_name(self):
return "upload" if self.is_upload else "build"
def get_common_name_pieces(self, with_version_dots):
extra_name_suffix = [self.get_phase_name()] if self.is_upload else []
Reported by Pylint.
Line: 16
Column: 5
def get_phase_name(self):
return "upload" if self.is_upload else "build"
def get_common_name_pieces(self, with_version_dots):
extra_name_suffix = [self.get_phase_name()] if self.is_upload else []
common_name_pieces = [
"ios",
Reported by Pylint.
Line: 30
Column: 5
return common_name_pieces
def gen_job_name(self):
return "_".join(["pytorch"] + self.get_common_name_pieces(False))
def gen_tree(self):
extra_requires = [x.gen_job_name() for x in BUILD_CONFIGS] if self.is_upload else []
Reported by Pylint.
Line: 33
Column: 5
def gen_job_name(self):
return "_".join(["pytorch"] + self.get_common_name_pieces(False))
def gen_tree(self):
extra_requires = [x.gen_job_name() for x in BUILD_CONFIGS] if self.is_upload else []
props_dict = {
"build_environment": "-".join(["libtorch"] + self.get_common_name_pieces(True)),
"requires": extra_requires,
Reported by Pylint.
Line: 69
Column: 1
]
def get_workflow_jobs():
return [item.gen_tree() for item in WORKFLOW_DATA]
Reported by Pylint.