The following issues were found:
torch/backends/__init__.py
15 issues
Line: 10
Column: 5
__allow_nonbracketed_mutation_flag = True
def disable_global_flags():
global __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = False
def flags_frozen():
return not __allow_nonbracketed_mutation_flag
Reported by Pylint.
Line: 18
Column: 5
@contextmanager
def __allow_nonbracketed_mutation():
global __allow_nonbracketed_mutation_flag
old = __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = True
try:
yield
finally:
Reported by Pylint.
Line: 1
Column: 1
from contextlib import contextmanager
import types
# The idea for this parameter is that we forbid bare assignment
# to torch.backends.<cudnn|mkldnn>.enabled and friends when running our
# test suite, where it's very easy to forget to undo the change
# later.
__allow_nonbracketed_mutation_flag = True
def disable_global_flags():
Reported by Pylint.
Line: 7
Column: 1
# to torch.backends.<cudnn|mkldnn>.enabled and friends when running our
# test suite, where it's very easy to forget to undo the change
# later.
__allow_nonbracketed_mutation_flag = True
def disable_global_flags():
global __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = False
Reported by Pylint.
Line: 9
Column: 1
# later.
__allow_nonbracketed_mutation_flag = True
def disable_global_flags():
global __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = False
def flags_frozen():
return not __allow_nonbracketed_mutation_flag
Reported by Pylint.
Line: 10
Column: 5
__allow_nonbracketed_mutation_flag = True
def disable_global_flags():
global __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = False
def flags_frozen():
return not __allow_nonbracketed_mutation_flag
Reported by Pylint.
Line: 13
Column: 1
global __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = False
def flags_frozen():
return not __allow_nonbracketed_mutation_flag
@contextmanager
def __allow_nonbracketed_mutation():
global __allow_nonbracketed_mutation_flag
Reported by Pylint.
Line: 18
Column: 5
@contextmanager
def __allow_nonbracketed_mutation():
global __allow_nonbracketed_mutation_flag
old = __allow_nonbracketed_mutation_flag
__allow_nonbracketed_mutation_flag = True
try:
yield
finally:
Reported by Pylint.
Line: 26
Column: 1
finally:
__allow_nonbracketed_mutation_flag = old
class ContextProp(object):
def __init__(self, getter, setter):
self.getter = getter
self.setter = setter
def __get__(self, obj, objtype):
Reported by Pylint.
Line: 26
Column: 1
finally:
__allow_nonbracketed_mutation_flag = old
class ContextProp(object):
def __init__(self, getter, setter):
self.getter = getter
self.setter = setter
def __get__(self, obj, objtype):
Reported by Pylint.
test/onnx/model_defs/squeezenet.py
15 issues
Line: 1
Column: 1
import torch
import torch.nn as nn
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
Reported by Pylint.
Line: 2
Column: 1
import torch
import torch.nn as nn
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
Reported by Pylint.
Line: 3
Column: 1
import torch
import torch.nn as nn
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
Reported by Pylint.
Line: 1
Column: 1
import torch
import torch.nn as nn
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
Reported by Pylint.
Line: 6
Column: 1
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
super(Fire, self).__init__()
self.inplanes = inplanes
Reported by Pylint.
Line: 6
Column: 1
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
super(Fire, self).__init__()
self.inplanes = inplanes
Reported by Pylint.
Line: 10
Column: 9
def __init__(self, inplanes, squeeze_planes,
expand1x1_planes, expand3x3_planes):
super(Fire, self).__init__()
self.inplanes = inplanes
self.squeeze = nn.Conv2d(inplanes, squeeze_planes, kernel_size=1)
self.squeeze_activation = nn.ReLU(inplace=True)
self.expand1x1 = nn.Conv2d(squeeze_planes, expand1x1_planes,
kernel_size=1)
Reported by Pylint.
Line: 21
Column: 5
kernel_size=3, padding=1)
self.expand3x3_activation = nn.ReLU(inplace=True)
def forward(self, x):
x = self.squeeze_activation(self.squeeze(x))
return torch.cat([
self.expand1x1_activation(self.expand1x1(x)),
self.expand3x3_activation(self.expand3x3(x))
], 1)
Reported by Pylint.
Line: 21
Column: 5
kernel_size=3, padding=1)
self.expand3x3_activation = nn.ReLU(inplace=True)
def forward(self, x):
x = self.squeeze_activation(self.squeeze(x))
return torch.cat([
self.expand1x1_activation(self.expand1x1(x)),
self.expand3x3_activation(self.expand3x3(x))
], 1)
Reported by Pylint.
Line: 29
Column: 1
], 1)
class SqueezeNet(nn.Module):
def __init__(self, version=1.0, num_classes=1000, ceil_mode=False):
super(SqueezeNet, self).__init__()
if version not in [1.0, 1.1]:
raise ValueError("Unsupported SqueezeNet version {version}:"
Reported by Pylint.
torch/contrib/_tensorboard_vis.py
15 issues
Line: 33
Column: 26
value_map = {}
pb_graph = pb_graph or graph_pb2.GraphDef()
if isinstance(graph, torch._C.GraphExecutorState):
visualize_graph_executor(graph, name_prefix, pb_graph,
partial(visualize, pb_graph=pb_graph))
return pb_graph
# Set up an input node
Reported by Pylint.
Line: 39
Column: 5
return pb_graph
# Set up an input node
input_node = pb_graph.node.add(op='input', name=name_prefix + 'input')
for i, value in enumerate(graph.param_node().outputs()):
value_map[value.unique()] = name_prefix + 'input:' + str(i)
visualize_rec(graph, value_map, name_prefix, pb_graph, executors_it)
Reported by Pylint.
Line: 116
Column: 9
return kind, name_prefix + kind + '_' + str(op_id_counter[kind])
def add_fusion_group(node):
op, name = name_for(node)
inline_graph(node.g('Subgraph'), name + '/', node)
def add_graph_executor(node):
op, name = name_for(node)
if executors_it is None:
Reported by Pylint.
Line: 120
Column: 9
inline_graph(node.g('Subgraph'), name + '/', node)
def add_graph_executor(node):
op, name = name_for(node)
if executors_it is None:
add_node(node)
else:
ge = next(executors_it)
visualize_graph_executor(ge, name + '/', pb_graph,
Reported by Pylint.
Line: 137
Column: 3
pb_node = pb_graph.node.add(op=op, name=name)
for value in node.inputs():
pb_node.input.append(value_map[value.unique()])
# TODO: handle attrs
for i, value in enumerate(node.outputs()):
value_map[value.unique()] = name + ':' + str(i)
for node in graph.nodes():
add_node(node)
Reported by Pylint.
Line: 1
Column: 1
import time
from collections import defaultdict
from functools import partial
from typing import DefaultDict
import torch
# Unfortunately it doesn't seem as if there was any way to get TensorBoard to do
Reported by Pylint.
Line: 21
Column: 1
"TensorFlow installed") from None
def dump_tensorboard_summary(graph_executor, logdir):
with FileWriter(logdir) as w:
pb_graph = visualize(graph_executor)
evt = event_pb2.Event(wall_time=time.time(), graph_def=pb_graph.SerializeToString())
w.add_event(evt)
Reported by Pylint.
Line: 22
Column: 32
def dump_tensorboard_summary(graph_executor, logdir):
with FileWriter(logdir) as w:
pb_graph = visualize(graph_executor)
evt = event_pb2.Event(wall_time=time.time(), graph_def=pb_graph.SerializeToString())
w.add_event(evt)
Reported by Pylint.
Line: 116
Column: 9
return kind, name_prefix + kind + '_' + str(op_id_counter[kind])
def add_fusion_group(node):
op, name = name_for(node)
inline_graph(node.g('Subgraph'), name + '/', node)
def add_graph_executor(node):
op, name = name_for(node)
if executors_it is None:
Reported by Pylint.
Line: 120
Column: 9
inline_graph(node.g('Subgraph'), name + '/', node)
def add_graph_executor(node):
op, name = name_for(node)
if executors_it is None:
add_node(node)
else:
ge = next(executors_it)
visualize_graph_executor(ge, name + '/', pb_graph,
Reported by Pylint.
caffe2/python/timeout_guard.py
15 issues
Line: 17
Column: 1
from future.utils import viewitems
'''
Sometimes CUDA devices can get stuck, 'deadlock'. In this case it is often
better just the kill the process automatically. Use this guard to set a
maximum timespan for a python call, such as RunNet(). If it does not complete
in time, process is killed.
Reported by Pylint.
Line: 47
Column: 23
self.condition.release()
if not self.completed:
log = logging.getLogger("timeout_guard")
log.error("Call did not finish in time. Timeout:{}s PID: {}".format(
self.timeout_secs,
os.getpid(),
))
# First try dying cleanly, but in 10 secs, exit properly
Reported by Pylint.
Line: 62
Column: 50
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
if threadId == self.caller_thread.ident:
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
Reported by Pylint.
Line: 84
Column: 46
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
code.append(" %s" % (line.strip()))
Reported by Pylint.
Line: 1
Column: 1
## @package timeout_guard
# Module caffe2.python.timeout_guard
import contextlib
import threading
Reported by Pylint.
Line: 29
Column: 1
'''
class WatcherThread(threading.Thread):
def __init__(self, timeout_secs):
threading.Thread.__init__(self)
self.timeout_secs = timeout_secs
self.completed = False
Reported by Pylint.
Line: 59
Column: 17
print("Caller thread was: {}".format(self.caller_thread))
print("-----After force------")
log.info("-----After force------")
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
if threadId == self.caller_thread.ident:
code.append("\n# ThreadID: %s" % threadId)
Reported by Pylint.
Line: 60
Column: 17
print("-----After force------")
log.info("-----After force------")
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
if threadId == self.caller_thread.ident:
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
Reported by Pylint.
Line: 62
Column: 21
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
if threadId == self.caller_thread.ident:
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
Reported by Pylint.
Line: 81
Column: 13
forcet.start()
print("Caller thread was: {}".format(self.caller_thread))
print("-----Before forcing------")
import sys
import traceback
code = []
for threadId, stack in viewitems(sys._current_frames()):
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
Reported by Pylint.
caffe2/python/operator_test/depthwise_3x3_conv_test.py
15 issues
Line: 9
Column: 1
import numpy as np
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
from hypothesis import given, settings
import hypothesis.strategies as st
class Depthwise3x3ConvOpsTest(hu.HypothesisTestCase):
@given(pad=st.integers(0, 1),
Reported by Pylint.
Line: 10
Column: 1
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
from hypothesis import given, settings
import hypothesis.strategies as st
class Depthwise3x3ConvOpsTest(hu.HypothesisTestCase):
@given(pad=st.integers(0, 1),
kernel=st.integers(3, 3),
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import caffe2.python.hypothesis_test_util as hu
from caffe2.python import core, utils
from hypothesis import given, settings
Reported by Pylint.
Line: 13
Column: 1
import hypothesis.strategies as st
class Depthwise3x3ConvOpsTest(hu.HypothesisTestCase):
@given(pad=st.integers(0, 1),
kernel=st.integers(3, 3),
size=st.integers(4, 8),
channels=st.integers(2, 4),
batch_size=st.integers(1, 1),
Reported by Pylint.
Line: 24
Column: 5
use_bias=st.booleans(),
**hu.gcs)
@settings(deadline=10000)
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
Reported by Pylint.
Line: 24
Column: 5
use_bias=st.booleans(),
**hu.gcs)
@settings(deadline=10000)
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
Reported by Pylint.
Line: 24
Column: 5
use_bias=st.booleans(),
**hu.gcs)
@settings(deadline=10000)
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
Reported by Pylint.
Line: 24
Column: 5
use_bias=st.booleans(),
**hu.gcs)
@settings(deadline=10000)
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
Reported by Pylint.
Line: 24
Column: 5
use_bias=st.booleans(),
**hu.gcs)
@settings(deadline=10000)
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
Reported by Pylint.
Line: 27
Column: 9
def test_convolution_gradients(self, pad, kernel, size,
channels, batch_size,
order, engine, use_bias, gc, dc):
op = core.CreateOperator(
"Conv",
["X", "w", "b"] if use_bias else ["X", "w"],
["Y"],
kernel=kernel,
pad=pad,
Reported by Pylint.
caffe2/python/predictor/predictor_exporter_test.py
15 issues
Line: 1
Column: 1
import tempfile
import unittest
import numpy as np
from caffe2.python import cnn, workspace, core
Reported by Pylint.
Line: 12
Column: 1
from caffe2.python import cnn, workspace, core
from future.utils import viewitems
from caffe2.python.predictor_constants import predictor_constants as pc
import caffe2.python.predictor.predictor_exporter as pe
import caffe2.python.predictor.predictor_py_utils as pred_utils
from caffe2.proto import caffe2_pb2, metanet_pb2
Reported by Pylint.
Line: 18
Column: 1
from caffe2.proto import caffe2_pb2, metanet_pb2
class MetaNetDefTest(unittest.TestCase):
def test_minimal(self):
'''
Tests that a NetsMap message can be created with a NetDef message
'''
# This calls the constructor for a metanet_pb2.NetsMap
Reported by Pylint.
Line: 19
Column: 5
class MetaNetDefTest(unittest.TestCase):
def test_minimal(self):
'''
Tests that a NetsMap message can be created with a NetDef message
'''
# This calls the constructor for a metanet_pb2.NetsMap
metanet_pb2.NetsMap(key="test_key", value=caffe2_pb2.NetDef())
Reported by Pylint.
Line: 26
Column: 5
# This calls the constructor for a metanet_pb2.NetsMap
metanet_pb2.NetsMap(key="test_key", value=caffe2_pb2.NetDef())
def test_adding_net(self):
'''
Tests that NetDefs can be added to MetaNetDefs
'''
meta_net_def = metanet_pb2.MetaNetDef()
net_def = caffe2_pb2.NetDef()
Reported by Pylint.
Line: 52
Column: 1
self.assertEqual(replaced_blob_def, pred_utils.GetBlobs(meta_net_def, blob_name))
class PredictorExporterTest(unittest.TestCase):
def _create_model(self):
m = cnn.CNNModelHelper()
m.FC("data", "y",
dim_in=5, dim_out=10,
weight_init=m.XavierInit,
Reported by Pylint.
Line: 53
Column: 5
class PredictorExporterTest(unittest.TestCase):
def _create_model(self):
m = cnn.CNNModelHelper()
m.FC("data", "y",
dim_in=5, dim_out=10,
weight_init=m.XavierInit,
bias_init=m.XavierInit)
Reported by Pylint.
Line: 54
Column: 9
class PredictorExporterTest(unittest.TestCase):
def _create_model(self):
m = cnn.CNNModelHelper()
m.FC("data", "y",
dim_in=5, dim_out=10,
weight_init=m.XavierInit,
bias_init=m.XavierInit)
return m
Reported by Pylint.
Line: 63
Column: 9
def setUp(self):
np.random.seed(1)
m = self._create_model()
self.predictor_export_meta = pe.PredictorExportMeta(
predict_net=m.net.Proto(),
parameters=[str(b) for b in m.params],
inputs=["data"],
Reported by Pylint.
Line: 84
Column: 9
'''
Test that passing net itself instead of proto works
'''
m = self._create_model()
pe.PredictorExportMeta(
predict_net=m.net,
parameters=m.params,
inputs=["data"],
outputs=["y"],
Reported by Pylint.
scripts/release_notes/categorize.py
15 issues
Line: 24
Column: 13
while i < len(commits):
cur_commit = commits[i]
next_commit = commits[i + 1] if i + 1 < len(commits) else None
jump_to = self.handle_commit(cur_commit, already_done + i + 1, total_commits, commits)
# Increment counter
if jump_to is not None:
i = jump_to
elif next_commit is None:
Reported by Pylint.
Line: 1
Column: 1
import argparse
import os
import textwrap
from common import categories, topics, CommitDataCache
from commitlist import CommitList
class Categorizer:
def __init__(self, path, category='Uncategorized'):
self.cache = CommitDataCache()
Reported by Pylint.
Line: 7
Column: 1
from common import categories, topics, CommitDataCache
from commitlist import CommitList
class Categorizer:
def __init__(self, path, category='Uncategorized'):
self.cache = CommitDataCache()
self.commits = CommitList.from_existing(path)
# Special categories: 'Uncategorized'
Reported by Pylint.
Line: 16
Column: 5
# All other categories must be real
self.category = category
def categorize(self):
commits = self.commits.filter(category=self.category)
total_commits = len(self.commits.commits)
already_done = total_commits - len(commits)
i = 0
while i < len(commits):
Reported by Pylint.
Line: 34
Column: 5
else:
i = commits.index(next_commit)
def features(self, commit):
return self.cache.get(commit.commit_hash)
def potential_reverts_of(self, commit, commits):
submodule_update_str = ['Update TensorPipe submodule',
'Updating submodules',
Reported by Pylint.
Line: 37
Column: 5
def features(self, commit):
return self.cache.get(commit.commit_hash)
def potential_reverts_of(self, commit, commits):
submodule_update_str = ['Update TensorPipe submodule',
'Updating submodules',
'Automated submodule update']
if any(a in commit.title for a in submodule_update_str):
return []
Reported by Pylint.
Line: 59
Column: 5
commit.commit_hash != cand.commit_hash})
return reasons
def handle_commit(self, commit, i, total, commits):
potential_reverts = self.potential_reverts_of(commit, commits)
if potential_reverts:
potential_reverts = f'!!!POTENTIAL REVERTS!!!: {potential_reverts}'
else:
potential_reverts = ""
Reported by Pylint.
Line: 59
Column: 5
commit.commit_hash != cand.commit_hash})
return reasons
def handle_commit(self, commit, i, total, commits):
potential_reverts = self.potential_reverts_of(commit, commits)
if potential_reverts:
potential_reverts = f'!!!POTENTIAL REVERTS!!!: {potential_reverts}'
else:
potential_reverts = ""
Reported by Pylint.
Line: 75
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b605_start_process_with_a_shell.html
if 'module: deprecation' in features.labels:
breaking_alarm += "\n!!!!!! DEPRECATION !!!!!!"
os.system('clear')
view = textwrap.dedent(f'''\
[{i}/{total}]
================================================================================
{features.title}
Reported by Bandit.
Line: 75
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b607_start_process_with_partial_path.html
if 'module: deprecation' in features.labels:
breaking_alarm += "\n!!!!!! DEPRECATION !!!!!!"
os.system('clear')
view = textwrap.dedent(f'''\
[{i}/{total}]
================================================================================
{features.title}
Reported by Bandit.
caffe2/python/normalizer.py
15 issues
Line: 9
Column: 5
class Normalizer(object):
def __init__(self):
pass
"""
Adds normalization to train_net for given parameter. Its factor ahead of
regularization is given when initialization.
The param should be a BlobReference.
"""
Reported by Pylint.
Line: 28
Column: 5
self._momentum = float(momentum)
self._scale_init_value = float(scale_init_value)
def _run(self, layer_model, param):
return layer_model.BatchNormalization(
param, momentum=self._momentum, scale_init_value=self._scale_init_value
)
Reported by Pylint.
Line: 41
Column: 5
self._use_layer_norm_op = use_layer_norm_op
self._scale_init_value = float(scale_init_value)
def _run(self, layer_model, param):
return layer_model.LayerNormalization(
param, epsilon=self._epsilon, use_layer_norm_op=self._use_layer_norm_op, scale_init_value=self._scale_init_value
)
Reported by Pylint.
Line: 1
Column: 1
# @package optimizer
# Module caffe2.python.normalizer
class Normalizer(object):
def __init__(self):
pass
"""
Reported by Pylint.
Line: 6
Column: 1
class Normalizer(object):
def __init__(self):
pass
"""
Adds normalization to train_net for given parameter. Its factor ahead of
regularization is given when initialization.
Reported by Pylint.
Line: 6
Column: 1
class Normalizer(object):
def __init__(self):
pass
"""
Adds normalization to train_net for given parameter. Its factor ahead of
regularization is given when initialization.
Reported by Pylint.
Line: 6
Column: 1
class Normalizer(object):
def __init__(self):
pass
"""
Adds normalization to train_net for given parameter. Its factor ahead of
regularization is given when initialization.
Reported by Pylint.
Line: 18
Column: 5
def __call__(self, net, param):
return self._run(net, param)
def _run(self, net, param):
raise Exception("Not Impelemented")
class BatchNormalizer(Normalizer):
def __init__(self, momentum, scale_init_value=1.0):
Reported by Pylint.
Line: 22
Column: 1
raise Exception("Not Impelemented")
class BatchNormalizer(Normalizer):
def __init__(self, momentum, scale_init_value=1.0):
super(BatchNormalizer, self).__init__()
self._momentum = float(momentum)
self._scale_init_value = float(scale_init_value)
Reported by Pylint.
Line: 22
Column: 1
raise Exception("Not Impelemented")
class BatchNormalizer(Normalizer):
def __init__(self, momentum, scale_init_value=1.0):
super(BatchNormalizer, self).__init__()
self._momentum = float(momentum)
self._scale_init_value = float(scale_init_value)
Reported by Pylint.
caffe2/python/operator_test/lars_test.py
15 issues
Line: 7
Column: 1
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
Reported by Pylint.
Line: 9
Column: 1
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
class TestLars(hu.HypothesisTestCase):
Reported by Pylint.
Line: 18
Column: 41
@given(offset=st.floats(min_value=0, max_value=100),
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
Reported by Pylint.
Line: 13
Column: 1
import numpy as np
class TestLars(hu.HypothesisTestCase):
@given(offset=st.floats(min_value=0, max_value=100),
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
Reported by Pylint.
Line: 18
Column: 5
@given(offset=st.floats(min_value=0, max_value=100),
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
Reported by Pylint.
Line: 18
Column: 5
@given(offset=st.floats(min_value=0, max_value=100),
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
Reported by Pylint.
Line: 18
Column: 5
@given(offset=st.floats(min_value=0, max_value=100),
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
Reported by Pylint.
Line: 19
Column: 9
lr_min=st.floats(min_value=1e-8, max_value=1e-6),
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
Reported by Pylint.
Line: 20
Column: 9
**hu.gcs)
def test_lars(self, offset, lr_min, dc, gc):
X = np.random.rand(6, 7, 8, 9).astype(np.float32)
dX = np.random.rand(6, 7, 8, 9).astype(np.float32)
wd = np.array([1e-4]).astype(np.float32)
trust = np.random.rand(1).astype(np.float32)
lr_max = np.random.rand(1).astype(np.float32)
def ref_lars(X, dX, wd, trust, lr_max):
Reported by Pylint.
caffe2/python/operator_test/rms_norm_op_test.py
15 issues
Line: 4
Column: 1
from caffe2.python import core
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
Reported by Pylint.
Line: 7
Column: 1
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
import unittest
Reported by Pylint.
Line: 1
Column: 1
from caffe2.python import core
from hypothesis import given, settings
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
Reported by Pylint.
Line: 10
Column: 1
import hypothesis.strategies as st
import numpy as np
import unittest
class TestRMSNormOp(hu.HypothesisTestCase):
@given(
M=st.integers(0, 8),
Reported by Pylint.
Line: 13
Column: 1
import unittest
class TestRMSNormOp(hu.HypothesisTestCase):
@given(
M=st.integers(0, 8),
N=st.integers(1, 16),
eps=st.floats(0, 1e-3),
dtype=st.sampled_from([np.float32, np.float64]),
Reported by Pylint.
Line: 21
Column: 5
dtype=st.sampled_from([np.float32, np.float64]),
**hu.gcs,
)
@settings(deadline=None)
def test_rms_norm(self, M, N, eps, dtype, gc, dc):
X = (np.random.randn(M, N) * 2.0 + 1.0).astype(dtype)
gamma = np.random.randn(N).astype(dtype)
beta = np.random.randn(N).astype(dtype)
Reported by Pylint.
Line: 21
Column: 5
dtype=st.sampled_from([np.float32, np.float64]),
**hu.gcs,
)
@settings(deadline=None)
def test_rms_norm(self, M, N, eps, dtype, gc, dc):
X = (np.random.randn(M, N) * 2.0 + 1.0).astype(dtype)
gamma = np.random.randn(N).astype(dtype)
beta = np.random.randn(N).astype(dtype)
Reported by Pylint.
Line: 21
Column: 5
dtype=st.sampled_from([np.float32, np.float64]),
**hu.gcs,
)
@settings(deadline=None)
def test_rms_norm(self, M, N, eps, dtype, gc, dc):
X = (np.random.randn(M, N) * 2.0 + 1.0).astype(dtype)
gamma = np.random.randn(N).astype(dtype)
beta = np.random.randn(N).astype(dtype)
Reported by Pylint.
Line: 21
Column: 5
dtype=st.sampled_from([np.float32, np.float64]),
**hu.gcs,
)
@settings(deadline=None)
def test_rms_norm(self, M, N, eps, dtype, gc, dc):
X = (np.random.randn(M, N) * 2.0 + 1.0).astype(dtype)
gamma = np.random.randn(N).astype(dtype)
beta = np.random.randn(N).astype(dtype)
Reported by Pylint.
Line: 21
Column: 5
dtype=st.sampled_from([np.float32, np.float64]),
**hu.gcs,
)
@settings(deadline=None)
def test_rms_norm(self, M, N, eps, dtype, gc, dc):
X = (np.random.randn(M, N) * 2.0 + 1.0).astype(dtype)
gamma = np.random.randn(N).astype(dtype)
beta = np.random.randn(N).astype(dtype)
Reported by Pylint.