The following issues were found:
caffe2/quantization/server/int8_gen_quant_params_min_max_test.py
21 issues
Line: 19
Column: 1
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
Reported by Pylint.
Line: 22
Column: 1
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
class TestInt8GenQuantParamsMinMaxOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
class TestInt8GenQuantParamsMinMaxOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
@given(
Reported by Pylint.
Line: 36
Column: 54
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_min_max_op(
self, n, m, preserve_sparsity, rnd_seed, gc, dc
):
X_min = 0 if preserve_sparsity else -77
X_max = X_min + 255
np.random.seed(rnd_seed)
X = np.round(np.random.rand(n, m) * (X_max - X_min) + X_min).astype(
Reported by Pylint.
Line: 45
Column: 15
np.float32
)
# Calculate X_qparam
hist, bin_edges = np.histogram(X.flatten(), bins=2048)
X_qparam = dnnlowp_pybind11.ChooseStaticQuantizationParams(
np.min(X), np.max(X), hist, preserve_sparsity, 8, "MIN_MAX_QUANTIZATION"
)
# Build a net to generate X's qparam using the Int8GenQuantParamsMinMax op
Reported by Pylint.
Line: 1
Column: 1
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 26
Column: 1
from hypothesis import given, settings
class TestInt8GenQuantParamsMinMaxOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
@given(
n=st.integers(10, 10),
m=st.integers(10, 10),
preserve_sparsity=st.booleans(),
Reported by Pylint.
Line: 34
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_min_max_op(
self, n, m, preserve_sparsity, rnd_seed, gc, dc
):
X_min = 0 if preserve_sparsity else -77
X_max = X_min + 255
Reported by Pylint.
Line: 34
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_min_max_op(
self, n, m, preserve_sparsity, rnd_seed, gc, dc
):
X_min = 0 if preserve_sparsity else -77
X_max = X_min + 255
Reported by Pylint.
Line: 34
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_min_max_op(
self, n, m, preserve_sparsity, rnd_seed, gc, dc
):
X_min = 0 if preserve_sparsity else -77
X_max = X_min + 255
Reported by Pylint.
caffe2/quantization/server/quantize_dnnlowp_op_test.py
21 issues
Line: 4
Column: 1
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
Reported by Pylint.
Line: 7
Column: 1
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops")
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
Reported by Pylint.
Line: 8
Column: 1
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops")
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
Reported by Pylint.
Line: 21
Column: 65
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
def test_dnnlowp_quantize(self, size, is_empty, absorb, gc, dc):
if is_empty:
size = 0
min_ = -10.0
max_ = 20.0
X = (np.random.rand(size) * (max_ - min_) + min_).astype(np.float32)
Reported by Pylint.
Line: 1
Column: 1
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
Reported by Pylint.
Line: 15
Column: 1
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
class DNNLowPQuantizeOpTest(hu.HypothesisTestCase):
@given(size=st.integers(1024, 2048),
is_empty=st.booleans(),
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
Reported by Pylint.
Line: 21
Column: 5
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
def test_dnnlowp_quantize(self, size, is_empty, absorb, gc, dc):
if is_empty:
size = 0
min_ = -10.0
max_ = 20.0
X = (np.random.rand(size) * (max_ - min_) + min_).astype(np.float32)
Reported by Pylint.
Line: 21
Column: 5
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
def test_dnnlowp_quantize(self, size, is_empty, absorb, gc, dc):
if is_empty:
size = 0
min_ = -10.0
max_ = 20.0
X = (np.random.rand(size) * (max_ - min_) + min_).astype(np.float32)
Reported by Pylint.
Line: 21
Column: 5
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
def test_dnnlowp_quantize(self, size, is_empty, absorb, gc, dc):
if is_empty:
size = 0
min_ = -10.0
max_ = 20.0
X = (np.random.rand(size) * (max_ - min_) + min_).astype(np.float32)
Reported by Pylint.
Line: 21
Column: 5
absorb=st.booleans(),
**hu.gcs_cpu_only)
@settings(max_examples=10, deadline=None)
def test_dnnlowp_quantize(self, size, is_empty, absorb, gc, dc):
if is_empty:
size = 0
min_ = -10.0
max_ = 20.0
X = (np.random.rand(size) * (max_ - min_) + min_).astype(np.float32)
Reported by Pylint.
caffe2/python/operator_test/heatmap_max_keypoint_op_test.py
21 issues
Line: 10
Column: 1
import torch
import sys
import unittest
from scipy import interpolate
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
from caffe2.proto import caffe2_pb2
Reported by Pylint.
Line: 33
Column: 9
def c10_op_ref(maps, rois):
keypoints = torch.ops._caffe2.HeatmapMaxKeypoint(
torch.tensor(maps),
torch.tensor(rois),
should_output_softmax=True,
)
return [keypoints.numpy()]
Reported by Pylint.
Line: 34
Column: 9
def c10_op_ref(maps, rois):
keypoints = torch.ops._caffe2.HeatmapMaxKeypoint(
torch.tensor(maps),
torch.tensor(rois),
should_output_softmax=True,
)
return [keypoints.numpy()]
Reported by Pylint.
Line: 32
Column: 17
def c10_op_ref(maps, rois):
keypoints = torch.ops._caffe2.HeatmapMaxKeypoint(
torch.tensor(maps),
torch.tensor(rois),
should_output_softmax=True,
)
return [keypoints.numpy()]
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import torch
import sys
import unittest
Reported by Pylint.
Line: 8
Column: 1
import numpy as np
import torch
import sys
import unittest
from scipy import interpolate
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
Reported by Pylint.
Line: 9
Column: 1
import numpy as np
import torch
import sys
import unittest
from scipy import interpolate
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
from caffe2.proto import caffe2_pb2
Reported by Pylint.
Line: 23
Column: 1
HEATMAP_SIZE = 56
def heatmap_FAIR_keypoint_ref(maps, rois):
return [keypoint_utils.heatmaps_to_keypoints(maps, rois)]
def heatmap_approx_keypoint_ref(maps, rois):
return [keypoint_utils.approx_heatmap_keypoint(maps, rois)]
Reported by Pylint.
Line: 23
Column: 1
HEATMAP_SIZE = 56
def heatmap_FAIR_keypoint_ref(maps, rois):
return [keypoint_utils.heatmaps_to_keypoints(maps, rois)]
def heatmap_approx_keypoint_ref(maps, rois):
return [keypoint_utils.approx_heatmap_keypoint(maps, rois)]
Reported by Pylint.
Line: 27
Column: 1
return [keypoint_utils.heatmaps_to_keypoints(maps, rois)]
def heatmap_approx_keypoint_ref(maps, rois):
return [keypoint_utils.approx_heatmap_keypoint(maps, rois)]
def c10_op_ref(maps, rois):
keypoints = torch.ops._caffe2.HeatmapMaxKeypoint(
Reported by Pylint.
caffe2/quantization/server/int8_gen_quant_params_test.py
21 issues
Line: 19
Column: 1
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
Reported by Pylint.
Line: 22
Column: 1
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
class TestInt8GenQuantParamsOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
from caffe2.python import core, workspace
from caffe2.quantization.server import dnnlowp_pybind11
from hypothesis import given, settings
class TestInt8GenQuantParamsOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
@given(
Reported by Pylint.
Line: 45
Column: 76
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_op(
self, n, m, k, quantization_kind, preserve_sparsity, rnd_seed, gc, dc
):
assert n > 0, "Zero samples in the input data"
X_min = 0 if preserve_sparsity else -77
X_max = X_min + 255
np.random.seed(rnd_seed)
Reported by Pylint.
Line: 55
Column: 15
np.float32
)
# Calculate X_qparam
hist, bin_edges = np.histogram(X.flatten(), bins=2048)
X_qparam = dnnlowp_pybind11.ChooseStaticQuantizationParams(
np.min(X), np.max(X), hist, preserve_sparsity, 8, quantization_kind
)
# Build a net to generate X's qparam using the Int8GenQuantParams op
Reported by Pylint.
Line: 1
Column: 1
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 26
Column: 1
from hypothesis import given, settings
class TestInt8GenQuantParamsOperator(hu.HypothesisTestCase):
@settings(max_examples=20, deadline=None)
@given(
n=st.integers(10, 100),
m=st.integers(1, 128),
k=st.integers(64, 1024),
Reported by Pylint.
Line: 43
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_op(
self, n, m, k, quantization_kind, preserve_sparsity, rnd_seed, gc, dc
):
assert n > 0, "Zero samples in the input data"
X_min = 0 if preserve_sparsity else -77
Reported by Pylint.
Line: 43
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_op(
self, n, m, k, quantization_kind, preserve_sparsity, rnd_seed, gc, dc
):
assert n > 0, "Zero samples in the input data"
X_min = 0 if preserve_sparsity else -77
Reported by Pylint.
Line: 43
Column: 5
preserve_sparsity=st.booleans(),
rnd_seed=st.integers(1, 5),
**hu.gcs_cpu_only
)
def test_int8_gen_quant_params_op(
self, n, m, k, quantization_kind, preserve_sparsity, rnd_seed, gc, dc
):
assert n > 0, "Zero samples in the input data"
X_min = 0 if preserve_sparsity else -77
Reported by Pylint.
caffe2/python/operator_test/video_input_op_test.py
21 issues
Line: 17
Column: 5
try:
import lmdb
except ImportError:
raise unittest.SkipTest("python-lmdb is not installed")
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
Reported by Pylint.
Line: 1
Column: 1
import os
import shutil
import sys
import tempfile
import unittest
import numpy as np
Reported by Pylint.
Line: 20
Column: 1
raise unittest.SkipTest("python-lmdb is not installed")
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
# used for creating a list file for simple test input
with open(output_file, "w") as file:
for _i in range(n):
Reported by Pylint.
Line: 21
Column: 5
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
# used for creating a list file for simple test input
with open(output_file, "w") as file:
for _i in range(n):
file.write(line)
Reported by Pylint.
Line: 21
Column: 5
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
# used for creating a list file for simple test input
with open(output_file, "w") as file:
for _i in range(n):
file.write(line)
Reported by Pylint.
Line: 21
Column: 5
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
# used for creating a list file for simple test input
with open(output_file, "w") as file:
for _i in range(n):
file.write(line)
Reported by Pylint.
Line: 28
Column: 5
for _i in range(n):
file.write(line)
def create_video_db(self, list_file, output_file, use_list=False):
# Write to lmdb database...
LMDB_MAP_SIZE = 1 << 40 # MODIFY
env = lmdb.open(output_file, map_size=LMDB_MAP_SIZE)
total_size = 0
Reported by Pylint.
Line: 28
Column: 5
for _i in range(n):
file.write(line)
def create_video_db(self, list_file, output_file, use_list=False):
# Write to lmdb database...
LMDB_MAP_SIZE = 1 << 40 # MODIFY
env = lmdb.open(output_file, map_size=LMDB_MAP_SIZE)
total_size = 0
Reported by Pylint.
Line: 28
Column: 5
for _i in range(n):
file.write(line)
def create_video_db(self, list_file, output_file, use_list=False):
# Write to lmdb database...
LMDB_MAP_SIZE = 1 << 40 # MODIFY
env = lmdb.open(output_file, map_size=LMDB_MAP_SIZE)
total_size = 0
Reported by Pylint.
Line: 30
Column: 9
def create_video_db(self, list_file, output_file, use_list=False):
# Write to lmdb database...
LMDB_MAP_SIZE = 1 << 40 # MODIFY
env = lmdb.open(output_file, map_size=LMDB_MAP_SIZE)
total_size = 0
file_name = []
start_frame = []
Reported by Pylint.
test/onnx/export_onnx_tests_filter.py
21 issues
Line: 3
Column: 1
import argparse
import glob
import onnx.backend.test
import os
import shutil
from test_caffe2_common import run_generated_test
import google.protobuf.text_format
import test_onnx_common
import traceback
Reported by Pylint.
Line: 7
Column: 1
import os
import shutil
from test_caffe2_common import run_generated_test
import google.protobuf.text_format
import test_onnx_common
import traceback
_fail_test_dir = os.path.join(os.path.dirname(
os.path.realpath(__file__)), "fail", "generated")
Reported by Pylint.
Line: 31
Column: 35
model_file = os.path.join(dir_name, "model.onnx")
data_dir_pattern = os.path.join(dir_name, "test_data_set_*")
for data_dir in glob.glob(data_dir_pattern):
for device in torch.testing.get_all_device_types():
run_generated_test(model_file, data_dir, device)
if expect:
expect_file = os.path.join(_expect_dir,
"PyTorch-generated-{}.expect".format(d))
with open(expect_file, "w") as text_file:
Reported by Pylint.
Line: 20
Column: 48
def collect_generated_testcases(root_dir=test_onnx_common.pytorch_converted_dir,
verbose=False, fail_dir=None, expect=True):
total_pass = 0
total_fail = 0
for d in os.listdir(root_dir):
dir_name = os.path.join(root_dir, d)
if os.path.isdir(dir_name):
Reported by Pylint.
Line: 20
Column: 33
def collect_generated_testcases(root_dir=test_onnx_common.pytorch_converted_dir,
verbose=False, fail_dir=None, expect=True):
total_pass = 0
total_fail = 0
for d in os.listdir(root_dir):
dir_name = os.path.join(root_dir, d)
if os.path.isdir(dir_name):
Reported by Pylint.
Line: 20
Column: 63
def collect_generated_testcases(root_dir=test_onnx_common.pytorch_converted_dir,
verbose=False, fail_dir=None, expect=True):
total_pass = 0
total_fail = 0
for d in os.listdir(root_dir):
dir_name = os.path.join(root_dir, d)
if os.path.isdir(dir_name):
Reported by Pylint.
Line: 26
Column: 13
for d in os.listdir(root_dir):
dir_name = os.path.join(root_dir, d)
if os.path.isdir(dir_name):
failed = False
try:
model_file = os.path.join(dir_name, "model.onnx")
data_dir_pattern = os.path.join(dir_name, "test_data_set_*")
for data_dir in glob.glob(data_dir_pattern):
for device in torch.testing.get_all_device_types():
Reported by Pylint.
Line: 42
Column: 13
onnx.helper.strip_doc_string(model)
text_file.write(google.protobuf.text_format.MessageToString(model))
total_pass += 1
except Exception as e:
if verbose:
print("The test case in {} failed!".format(dir_name))
traceback.print_exc()
if fail_dir is None:
shutil.rmtree(dir_name)
Reported by Pylint.
Line: 42
Column: 20
onnx.helper.strip_doc_string(model)
text_file.write(google.protobuf.text_format.MessageToString(model))
total_pass += 1
except Exception as e:
if verbose:
print("The test case in {} failed!".format(dir_name))
traceback.print_exc()
if fail_dir is None:
shutil.rmtree(dir_name)
Reported by Pylint.
Line: 1
Column: 1
import argparse
import glob
import onnx.backend.test
import os
import shutil
from test_caffe2_common import run_generated_test
import google.protobuf.text_format
import test_onnx_common
import traceback
Reported by Pylint.
caffe2/python/queue_util.py
21 issues
Line: 17
Column: 1
logger = logging.getLogger(__name__)
class _QueueReader(dataio.Reader):
def __init__(self, wrapper, num_dequeue_records=1):
assert wrapper.schema is not None, (
'Queue needs a schema in order to be read from.')
dataio.Reader.__init__(self, wrapper.schema())
self._wrapper = wrapper
Reported by Pylint.
Line: 25
Column: 5
self._wrapper = wrapper
self._num_dequeue_records = num_dequeue_records
def setup_ex(self, init_net, exit_net):
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
def read_ex(self, local_init_net, local_finish_net):
self._wrapper._new_reader(local_init_net)
dequeue_net = core.Net('dequeue')
Reported by Pylint.
Line: 29
Column: 9
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
def read_ex(self, local_init_net, local_finish_net):
self._wrapper._new_reader(local_init_net)
dequeue_net = core.Net('dequeue')
fields, status_blob = dequeue(
dequeue_net,
self._wrapper.queue(),
len(self.schema().field_names()),
Reported by Pylint.
Line: 39
Column: 5
num_records=self._num_dequeue_records)
return [dequeue_net], status_blob, fields
def read(self, net):
net, _, fields = self.read_ex(net, None)
return net, fields
class _QueueWriter(dataio.Writer):
Reported by Pylint.
Line: 44
Column: 1
return net, fields
class _QueueWriter(dataio.Writer):
def __init__(self, wrapper):
self._wrapper = wrapper
def setup_ex(self, init_net, exit_net):
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
Reported by Pylint.
Line: 48
Column: 5
def __init__(self, wrapper):
self._wrapper = wrapper
def setup_ex(self, init_net, exit_net):
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
def write_ex(self, fields, local_init_net, local_finish_net, status):
self._wrapper._new_writer(self.schema(), local_init_net)
enqueue_net = core.Net('enqueue')
Reported by Pylint.
Line: 51
Column: 5
def setup_ex(self, init_net, exit_net):
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
def write_ex(self, fields, local_init_net, local_finish_net, status):
self._wrapper._new_writer(self.schema(), local_init_net)
enqueue_net = core.Net('enqueue')
enqueue(enqueue_net, self._wrapper.queue(), fields, status)
return [enqueue_net]
Reported by Pylint.
Line: 52
Column: 9
exit_net.CloseBlobsQueue([self._wrapper.queue()], 0)
def write_ex(self, fields, local_init_net, local_finish_net, status):
self._wrapper._new_writer(self.schema(), local_init_net)
enqueue_net = core.Net('enqueue')
enqueue(enqueue_net, self._wrapper.queue(), fields, status)
return [enqueue_net]
Reported by Pylint.
Line: 107
Column: 28
if blob not in queue_blobs:
queue_blobs.append(blob)
else:
logger.warning("Need to copy blob {} to enqueue".format(blob))
queue_blobs.append(net.Copy(blob))
results = net.SafeEnqueueBlobs([queue] + queue_blobs, queue_blobs + [status])
return results[-1]
Reported by Pylint.
Line: 1
Column: 1
## @package queue_util
# Module caffe2.python.queue_util
from caffe2.python import core, dataio
from caffe2.python.task import TaskGroup
Reported by Pylint.
caffe2/contrib/fakelowp/test/test_fusions.py
21 issues
Line: 2
Column: 1
# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
Reported by Pylint.
Line: 5
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
Reported by Pylint.
Line: 6
Column: 1
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 7
Column: 1
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 8
Column: 1
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
workspace.GlobalInit(
Reported by Pylint.
Line: 9
Column: 1
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
workspace.GlobalInit(
[
Reported by Pylint.
Line: 10
Column: 1
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
workspace.GlobalInit(
[
"caffe2",
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
workspace.GlobalInit(
[
"caffe2",
"--glow_global_fp16=1",
Reported by Pylint.
Line: 2
Column: 1
# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
Reported by Pylint.
Line: 1
Column: 1
# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
Reported by Pylint.
caffe2/contrib/tensorboard/tensorboard_test.py
21 issues
Line: 1
Column: 1
import click.testing
import numpy as np
import os
import tempfile
Reported by Pylint.
Line: 8
Column: 1
import click.testing
import numpy as np
import os
import tempfile
import unittest
from caffe2.python import brew, core, model_helper
import caffe2.contrib.tensorboard.tensorboard as tb
Reported by Pylint.
Line: 9
Column: 1
import click.testing
import numpy as np
import os
import tempfile
import unittest
from caffe2.python import brew, core, model_helper
import caffe2.contrib.tensorboard.tensorboard as tb
import caffe2.contrib.tensorboard.tensorboard_exporter as tb_exporter
Reported by Pylint.
Line: 10
Column: 1
import numpy as np
import os
import tempfile
import unittest
from caffe2.python import brew, core, model_helper
import caffe2.contrib.tensorboard.tensorboard as tb
import caffe2.contrib.tensorboard.tensorboard_exporter as tb_exporter
Reported by Pylint.
Line: 23
Column: 1
from tensorflow import GraphDef
def load_events(filename):
try:
# tensorboard>=1.14.0
from tensorboard.backend.event_processing import event_file_loader
loader = event_file_loader.EventFileLoader(filename)
return list(loader.Load())
Reported by Pylint.
Line: 26
Column: 9
def load_events(filename):
try:
# tensorboard>=1.14.0
from tensorboard.backend.event_processing import event_file_loader
loader = event_file_loader.EventFileLoader(filename)
return list(loader.Load())
except ImportError:
import tensorflow as tf
return list(tf.train.summary_iterator(filename))
Reported by Pylint.
Line: 30
Column: 9
loader = event_file_loader.EventFileLoader(filename)
return list(loader.Load())
except ImportError:
import tensorflow as tf
return list(tf.train.summary_iterator(filename))
class TensorboardTest(unittest.TestCase):
Reported by Pylint.
Line: 34
Column: 1
return list(tf.train.summary_iterator(filename))
class TensorboardTest(unittest.TestCase):
def test_events(self):
runner = click.testing.CliRunner()
c2_dir = tempfile.mkdtemp()
np.random.seed(1701)
Reported by Pylint.
Line: 36
Column: 5
class TensorboardTest(unittest.TestCase):
def test_events(self):
runner = click.testing.CliRunner()
c2_dir = tempfile.mkdtemp()
np.random.seed(1701)
n_iters = 2
blobs = ["w", "b"]
Reported by Pylint.
Line: 36
Column: 5
class TensorboardTest(unittest.TestCase):
def test_events(self):
runner = click.testing.CliRunner()
c2_dir = tempfile.mkdtemp()
np.random.seed(1701)
n_iters = 2
blobs = ["w", "b"]
Reported by Pylint.
aten/src/ATen/native/vulkan/api/vk_mem_alloc.h
21 issues
Line: 4924
Column: 9
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
{
const size_t len = strlen(srcStr);
char* const result = vma_new_array(allocs, char, len + 1);
memcpy(result, srcStr, len + 1);
return result;
}
else
{
return VMA_NULL;
Reported by FlawFinder.
Line: 5026
Column: 13
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
{
if(m_Count != 0)
{
memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
}
}
~VmaVector()
{
Reported by FlawFinder.
Line: 5042
Column: 17
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
resize(rhs.m_Count);
if(m_Count != 0)
{
memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
}
}
return *this;
}
Reported by FlawFinder.
Line: 5099
Column: 17
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
if(m_Count != 0)
{
memcpy(newArray, m_pArray, m_Count * sizeof(T));
}
VmaFree(m_Allocator.m_pCallbacks, m_pArray);
m_Capacity = newCapacity;
m_pArray = newArray;
}
Reported by FlawFinder.
Line: 5125
Column: 17
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
if(elementsToCopy != 0)
{
memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
}
VmaFree(m_Allocator.m_pCallbacks, m_pArray);
m_Capacity = newCapacity;
m_pArray = newArray;
}
Reported by FlawFinder.
Line: 5325
Column: 17
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
m_DynamicArray.resize(newCount, freeMemory);
if(m_Count > 0)
{
memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
}
}
else if(newCount <= N && m_Count > N)
{
// Shrinking, moving from m_DynamicArray to m_StaticArray
Reported by FlawFinder.
Line: 5333
Column: 17
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
// Shrinking, moving from m_DynamicArray to m_StaticArray
if(newCount > 0)
{
memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
}
m_DynamicArray.resize(0, freeMemory);
}
else
{
Reported by FlawFinder.
Line: 5434
Column: 20
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
union Item
{
uint32_t NextFreeIndex;
alignas(T) char Value[sizeof(T)];
};
struct ItemBlock
{
Item* pItems;
Reported by FlawFinder.
Line: 7785
Column: 9
CWE codes:
119
120
Suggestion:
Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length
const char* GetString() const { return m_Str; }
private:
char m_PtrStr[17];
const char* m_Str;
};
bool m_UseMutex;
VmaRecordFlags m_Flags;
Reported by FlawFinder.
Line: 8266
Column: 9
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
{
const size_t oldCount = m_Data.size();
m_Data.resize(oldCount + strLen);
memcpy(m_Data.data() + oldCount, pStr, strLen);
}
}
void VmaStringBuilder::AddNumber(uint32_t num)
{
Reported by FlawFinder.