The following issues were found:
scripts/release_notes/test_release_notes.py
5 issues
Line: 1
Column: 1
import unittest
import tempfile
from commitlist import CommitList
class TestCommitList(unittest.TestCase):
def test_create_new(self):
with tempfile.TemporaryDirectory() as tempdir:
commit_list_path = f'{tempdir}/commitlist.csv'
commit_list = CommitList.create_new(commit_list_path, 'v1.5.0', '7543e7e558')
Reported by Pylint.
Line: 5
Column: 1
import tempfile
from commitlist import CommitList
class TestCommitList(unittest.TestCase):
def test_create_new(self):
with tempfile.TemporaryDirectory() as tempdir:
commit_list_path = f'{tempdir}/commitlist.csv'
commit_list = CommitList.create_new(commit_list_path, 'v1.5.0', '7543e7e558')
self.assertEqual(len(commit_list.commits), 2143)
Reported by Pylint.
Line: 6
Column: 5
from commitlist import CommitList
class TestCommitList(unittest.TestCase):
def test_create_new(self):
with tempfile.TemporaryDirectory() as tempdir:
commit_list_path = f'{tempdir}/commitlist.csv'
commit_list = CommitList.create_new(commit_list_path, 'v1.5.0', '7543e7e558')
self.assertEqual(len(commit_list.commits), 2143)
self.assertEqual(commit_list.commits[0].commit_hash, '7335f079ab')
Reported by Pylint.
Line: 16
Column: 5
self.assertEqual(commit_list.commits[-1].commit_hash, '7543e7e558')
self.assertTrue(commit_list.commits[-1].title.startswith('Migrate minall, max, maxall'))
def test_read_write(self):
with tempfile.TemporaryDirectory() as tempdir:
commit_list_path = f'{tempdir}/commitlist.csv'
initial = CommitList.create_new(commit_list_path, 'v1.5.0', '7543e7e558')
initial.write_to_disk()
Reported by Pylint.
Line: 30
Column: 5
for commit, expected in zip(commit_list.commits, expected.commits):
self.assertEqual(commit, expected)
def test_update_to(self):
with tempfile.TemporaryDirectory() as tempdir:
commit_list_path = f'{tempdir}/commitlist.csv'
initial = CommitList.create_new(commit_list_path, 'v1.5.0', '7543e7e558')
initial.commits[-2].category = 'foobar'
self.assertEqual(len(initial.commits), 2143)
Reported by Pylint.
caffe2/video/video_io.cc
5 issues
Line: 90
Column: 7
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
const unsigned char* curr_frame = buffer_rgb +
(l * sampling_rate_of + j) * height * width * channels_rgb;
cv::Mat img = cv::Mat::zeros(height, width, CV_8UC3);
memcpy(
img.data,
curr_frame,
height * width * channels_rgb * sizeof(unsigned char));
// crop and mirror the frame
Reported by FlawFinder.
Line: 124
Column: 7
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
imgs[c] -= mean_of[c];
imgs[c] *= inv_std_of[c];
memcpy(
transformed_clip + c * channel_size_flow + l * frame_size,
imgs[c].data,
frame_size * sizeof(float));
}
Reported by FlawFinder.
Line: 149
Column: 9
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
mag -= mean_of[c];
mag *= inv_std_of[c];
memcpy(
transformed_clip + c * channel_size_flow + l * frame_size,
mag.data,
frame_size * sizeof(float));
break;
Reported by FlawFinder.
Line: 166
Column: 9
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
first_gray -= mean_of[c];
first_gray *= inv_std_of[c];
memcpy(
transformed_clip + c * channel_size_flow + l * frame_size,
first_gray.data,
frame_size * sizeof(float));
break;
Reported by FlawFinder.
Line: 185
Column: 11
CWE codes:
120
Suggestion:
Make sure destination can always hold the source data
chans[c - 2] -= mean_of[c];
chans[c - 2] *= inv_std_of[c];
memcpy(
transformed_clip + c * channel_size_flow + l * frame_size,
chans[c - 2].data,
frame_size * sizeof(float));
}
break;
Reported by FlawFinder.
test/onnx/model_defs/__init__.py
5 issues
Line: 1
Column: 1
from .squeezenet import * # noqa: F403
from .super_resolution import * # noqa: F403
from .op_test import * # noqa: F403
from .srresnet import * # noqa: F403
Reported by Pylint.
Line: 2
Column: 1
from .squeezenet import * # noqa: F403
from .super_resolution import * # noqa: F403
from .op_test import * # noqa: F403
from .srresnet import * # noqa: F403
Reported by Pylint.
Line: 3
Column: 1
from .squeezenet import * # noqa: F403
from .super_resolution import * # noqa: F403
from .op_test import * # noqa: F403
from .srresnet import * # noqa: F403
Reported by Pylint.
Line: 4
Column: 1
from .squeezenet import * # noqa: F403
from .super_resolution import * # noqa: F403
from .op_test import * # noqa: F403
from .srresnet import * # noqa: F403
Reported by Pylint.
Line: 1
Column: 1
from .squeezenet import * # noqa: F403
from .super_resolution import * # noqa: F403
from .op_test import * # noqa: F403
from .srresnet import * # noqa: F403
Reported by Pylint.
caffe2/python/operator_test/emptysample_ops_test.py
5 issues
Line: 1
Column: 1
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase
import numpy as np
Reported by Pylint.
Line: 30
Column: 1
[[2, 2], [0, 0], [4, 4], [4, 4], [0, 0]]]
class TestEmptySampleOps(TestCase):
def test_emptysample(self):
for i in range(0, 3):
PadEmptyTest = core.CreateOperator(
'PadEmptySamples',
['lengths', 'features1', 'features2'],
Reported by Pylint.
Line: 31
Column: 5
class TestEmptySampleOps(TestCase):
def test_emptysample(self):
for i in range(0, 3):
PadEmptyTest = core.CreateOperator(
'PadEmptySamples',
['lengths', 'features1', 'features2'],
['out_lengths', 'out_features1', 'out_features2'],
Reported by Pylint.
Line: 31
Column: 5
class TestEmptySampleOps(TestCase):
def test_emptysample(self):
for i in range(0, 3):
PadEmptyTest = core.CreateOperator(
'PadEmptySamples',
['lengths', 'features1', 'features2'],
['out_lengths', 'out_features1', 'out_features2'],
Reported by Pylint.
Line: 33
Column: 13
class TestEmptySampleOps(TestCase):
def test_emptysample(self):
for i in range(0, 3):
PadEmptyTest = core.CreateOperator(
'PadEmptySamples',
['lengths', 'features1', 'features2'],
['out_lengths', 'out_features1', 'out_features2'],
)
workspace.FeedBlob(
Reported by Pylint.
caffe2/python/operator_test/duplicate_operands_test.py
5 issues
Line: 1
Column: 1
import numpy as np
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase
Reported by Pylint.
Line: 12
Column: 1
from caffe2.python.test_util import TestCase
class TestDuplicateOperands(TestCase):
def test_duplicate_operands(self):
net = core.Net('net')
shape = (2, 4)
x_in = np.random.uniform(size=shape)
x = net.GivenTensorFill([], 'X', shape=shape,
Reported by Pylint.
Line: 13
Column: 5
class TestDuplicateOperands(TestCase):
def test_duplicate_operands(self):
net = core.Net('net')
shape = (2, 4)
x_in = np.random.uniform(size=shape)
x = net.GivenTensorFill([], 'X', shape=shape,
values=x_in.flatten().tolist())
Reported by Pylint.
Line: 17
Column: 9
net = core.Net('net')
shape = (2, 4)
x_in = np.random.uniform(size=shape)
x = net.GivenTensorFill([], 'X', shape=shape,
values=x_in.flatten().tolist())
xsq = net.Mul([x, x])
y = net.DotProduct([xsq, xsq])
net.AddGradientOperators([y])
workspace.RunNetOnce(net)
Reported by Pylint.
Line: 20
Column: 9
x = net.GivenTensorFill([], 'X', shape=shape,
values=x_in.flatten().tolist())
xsq = net.Mul([x, x])
y = net.DotProduct([xsq, xsq])
net.AddGradientOperators([y])
workspace.RunNetOnce(net)
self.assertTrue(np.allclose(workspace.FetchBlob('X_grad'),
4 * x_in**3))
Reported by Pylint.
caffe2/python/regularizer_context.py
5 issues
Line: 1
Column: 1
# @package regularizer_context
# Module caffe2.python.regularizer_context
from caffe2.python import context
from caffe2.python.modifier_context import (
Reported by Pylint.
Line: 18
Column: 5
provide context to allow param_info to have different regularizers
"""
def has_regularizer(self, name):
return self._has_modifier(name)
def get_regularizer(self, name):
assert self.has_regularizer(name), (
"{} regularizer is not provided!".format(name))
Reported by Pylint.
Line: 21
Column: 5
def has_regularizer(self, name):
return self._has_modifier(name)
def get_regularizer(self, name):
assert self.has_regularizer(name), (
"{} regularizer is not provided!".format(name))
return self._get_modifier(name)
Reported by Pylint.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
return self._has_modifier(name)
def get_regularizer(self, name):
assert self.has_regularizer(name), (
"{} regularizer is not provided!".format(name))
return self._get_modifier(name)
class UseRegularizer(UseModifierBase):
Reported by Bandit.
Line: 27
Column: 1
return self._get_modifier(name)
class UseRegularizer(UseModifierBase):
'''
context class to allow setting the current context.
Example usage with layer:
regularizers = {'reg1': reg1, 'reg2': reg2}
with UseRegularizer(regularizers):
Reported by Pylint.
caffe2/python/operator_test/atomic_ops_test.py
5 issues
Line: 49
Column: 9
plan.AddStep(super_step)
workspace.RunPlan(plan)
# checksum = sum[i=1..20000](i) = 20000 * 20001 / 2 = 200010000
self.assertEquals(workspace.FetchBlob(checksum), 200010000)
@unittest.skip("Test is flaky: https://github.com/pytorch/pytorch/issues/28179")
def test_atomic64_ops(self):
"""
Test that both countdown and checksum are update atomically by having
Reported by Pylint.
Line: 88
Column: 9
plan.AddStep(super_step)
workspace.RunPlan(plan)
# checksum = sum[i=1..20000](i) = 20000 * 20001 / 2 = 200010000
self.assertEquals(workspace.FetchBlob(checksum), 200010000)
if __name__ == "__main__":
unittest.main()
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase
import unittest
Reported by Pylint.
Line: 8
Column: 1
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase
import unittest
class TestAtomicOps(TestCase):
@unittest.skip("Test is flaky: https://github.com/pytorch/pytorch/issues/28179")
def test_atomic_ops(self):
Reported by Pylint.
Line: 11
Column: 1
import unittest
class TestAtomicOps(TestCase):
@unittest.skip("Test is flaky: https://github.com/pytorch/pytorch/issues/28179")
def test_atomic_ops(self):
"""
Test that both countdown and checksum are update atomically by having
cowntdown count from 20k to 0 from parallel the workers and updating
Reported by Pylint.
caffe2/python/onnx/workspace.py
5 issues
Line: 1
Column: 1
## @package onnx
# Module caffe2.python.onnx.workspace
import uuid
Reported by Pylint.
Line: 15
Column: 1
# Separating out the context manager part so that users won't
# (mis-)use Workspace instances as context managers
class _WorkspaceCtx(object):
def __init__(self, workspace_id):
self.workspace_id = workspace_id
# A stack, so that the context manager is reentrant.
self.workspace_stack = []
Reported by Pylint.
Line: 26
Column: 9
workspace.SwitchWorkspace(self.workspace_id, create_if_missing=True)
def __exit__(self, exc_type, exc_value, traceback):
w = self.workspace_stack.pop()
# Strictly speaking, create_if_missing here is unnecessary, since a user
# is not supposed to be allowed to destruct a workspace while we're in
# it. However, empirically, it has been observed that during abnormal
# shutdown, Caffe2 deletes its default workspace fairly early in the
# final calls to destructors. In this case, we may attempt to exit
Reported by Pylint.
Line: 37
Column: 1
workspace.SwitchWorkspace(w, create_if_missing=True)
class Workspace(object):
"""
An object representing a Caffe2 workspace. It is a context manager,
so you can say 'with workspace:' to use the represented workspace
as your global workspace. It also supports every method supported
by caffe2.python.workspace, but instead of running these operations
Reported by Pylint.
Line: 61
Column: 9
self._ctx = _WorkspaceCtx(str(uuid.uuid4()))
def __getattr__(self, attr):
def f(*args, **kwargs):
with self._ctx:
return getattr(workspace, attr)(*args, **kwargs)
return f
def __del__(self):
Reported by Pylint.
caffe2/python/onnx/bin/conversion.py
5 issues
Line: 12
Column: 1
from caffe2.proto import caffe2_pb2
import click
from onnx import ModelProto
from caffe2.python.onnx.backend import Caffe2Backend as c2
import caffe2.python.onnx.frontend as c2_onnx
Reported by Pylint.
Line: 1
Column: 1
## @package onnx
# Module caffe2.python.onnx.bin.conversion
import json
Reported by Pylint.
Line: 14
Column: 1
import click
from onnx import ModelProto
from caffe2.python.onnx.backend import Caffe2Backend as c2
import caffe2.python.onnx.frontend as c2_onnx
@click.command(
help='convert caffe2 net to onnx model',
Reported by Pylint.
Line: 35
Column: 1
type=str,
help='A json string providing the '
'type and shape information of the inputs')
@click.option('-o', '--output', required=True,
type=click.File('wb'),
help='Output path for the onnx model pb file')
def caffe2_to_onnx(caffe2_net,
caffe2_net_name,
caffe2_init_net,
Reported by Pylint.
Line: 80
Column: 1
help='Output path for the caffe2 net file')
@click.option('--init-net-output',
required=True,
type=click.File('wb'),
help='Output path for the caffe2 init net file')
def onnx_to_caffe2(onnx_model, output, init_net_output):
onnx_model_proto = ModelProto()
onnx_model_proto.ParseFromString(onnx_model.read())
Reported by Pylint.
caffe2/python/numa_test.py
5 issues
Line: 1
Column: 1
from caffe2.python import core, workspace
from caffe2.proto import caffe2_pb2
from caffe2.python.test_util import TestCase
import unittest
Reported by Pylint.
Line: 8
Column: 1
from caffe2.python import core, workspace
from caffe2.proto import caffe2_pb2
from caffe2.python.test_util import TestCase
import unittest
core.GlobalInit(["caffe2", "--caffe2_cpu_numa_enabled=1"])
def build_test_net(net_name):
net = core.Net(net_name)
Reported by Pylint.
Line: 12
Column: 1
core.GlobalInit(["caffe2", "--caffe2_cpu_numa_enabled=1"])
def build_test_net(net_name):
net = core.Net(net_name)
net.Proto().type = "async_scheduling"
numa_device_option = caffe2_pb2.DeviceOption()
numa_device_option.device_type = caffe2_pb2.CPU
Reported by Pylint.
Line: 42
Column: 1
@unittest.skipIf(not workspace.IsNUMAEnabled(), "NUMA is not enabled")
@unittest.skipIf(workspace.GetNumNUMANodes() < 2, "Not enough NUMA nodes")
@unittest.skipIf(not workspace.has_gpu_support, "No GPU support")
class NUMATest(TestCase):
def test_numa(self):
net = build_test_net("test_numa")
workspace.RunNetOnce(net)
Reported by Pylint.
Line: 43
Column: 5
@unittest.skipIf(workspace.GetNumNUMANodes() < 2, "Not enough NUMA nodes")
@unittest.skipIf(not workspace.has_gpu_support, "No GPU support")
class NUMATest(TestCase):
def test_numa(self):
net = build_test_net("test_numa")
workspace.RunNetOnce(net)
self.assertEqual(workspace.GetBlobNUMANode("output_blob_0"), 0)
Reported by Pylint.