The following issues were found:
keras/optimizer_v2/adadelta_test.py
114 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Adadelta Optimizer."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from keras import combinations
from keras.optimizer_v2 import adadelta
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from keras import combinations
from keras.optimizer_v2 import adadelta
_DATA_TYPES = [
Reported by Pylint.
Line: 21
Column: 1
from absl.testing import parameterized
import numpy as np
from keras import combinations
from keras.optimizer_v2 import adadelta
_DATA_TYPES = [
    tf.half, tf.float32, tf.float64, tf.complex64,
    tf.complex128
Reported by Pylint.
Line: 22
Column: 1
from absl.testing import parameterized
import numpy as np
from keras import combinations
from keras.optimizer_v2 import adadelta
_DATA_TYPES = [
    tf.half, tf.float32, tf.float64, tf.complex64,
    tf.complex128
]
Reported by Pylint.
Line: 106
Column: 3
if not tf.executing_eagerly():
  # Check that the accumulators have been updated
  # TODO(lxuechen): This is hard to test in eager mode
  for slot_idx in range(2):
    self.assertAllCloseAccordingToType(
        np.array([accum, accum], dtype=dtype.as_numpy_dtype(0)),
        self.evaluate(slot[slot_idx]),
        rtol=1e-5)
Reported by Pylint.
Line: 144
Column: 3
    self.doTestBasic(use_resource=True, use_callable_params=True)
  def testMinimizeSparseResourceVariable(self):
    # TODO(tanzheny, omalleyt): Fix test in eager mode.
    with tf.Graph().as_default():
      for dtype in _DATA_TYPES:
        var0 = tf.Variable([[1.0, 2.0]], dtype=dtype)
        x = tf.constant([[4.0], [5.0]], dtype=dtype)
Reported by Pylint.
Line: 30
Column: 1
]
class AdadeltaOptimizerTest(tf.test.TestCase, parameterized.TestCase):
  def doTestBasic(self, use_resource=False, use_callable_params=False):
    num_updates = 4  # number of ADADELTA steps to perform
    for dtype in _DATA_TYPES:
      for grad in [0.2, 0.1, 0.01]:
Reported by Pylint.
Line: 32
Column: 3
class AdadeltaOptimizerTest(tf.test.TestCase, parameterized.TestCase):
  def doTestBasic(self, use_resource=False, use_callable_params=False):
    num_updates = 4  # number of ADADELTA steps to perform
    for dtype in _DATA_TYPES:
      for grad in [0.2, 0.1, 0.01]:
        for lr in [1.0, 0.5, 0.1]:
          var0_init = [1.0, 2.0]
Reported by Pylint.
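Note: the findings at line 32 sit on the `doTestBasic` definition. The Pylint messages are not included in this report, so the mapping is only a guess, but the usual triggers for a camelCase helper without a docstring are missing-function-docstring (C0116) and invalid-name (C0103). A minimal sketch of the conventional remedy, assuming those messages:

# Hedged sketch: add a docstring and keep the camelCase name (TensorFlow's
# test-helper convention) by scoping the naming disable to this one line.
class AdadeltaOptimizerTest(tf.test.TestCase, parameterized.TestCase):

  def doTestBasic(self, use_resource=False, use_callable_params=False):  # pylint: disable=invalid-name
    """Runs the shared Adadelta update check across dtypes and hyperparameters."""
    num_updates = 4  # number of ADADELTA steps to perform

A scoped disable is usually preferred over renaming here, since the camelCase helper name is deliberate.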
keras/distribute/dataset_creator_model_fit_test_base.py
114 issues
Line: 18
Column: 1
# ==============================================================================
"""Tests for `DatasetCreator` with `Model.fit` across usages and strategies."""
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
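Note: the excerpt shows the standard-library `import os` sitting below the third-party `import tensorflow.compat.v2 as tf`, which is exactly what Pylint's wrong-import-order (C0411) flags; that reading is an assumption, since the message text is not shown. A sketch of the conventional PEP 8 grouping for the imports visible here:

# Hedged sketch: standard library first, then third-party, then first-party,
# with a blank line between groups.
import os

from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf

import keras
from keras import callbacks as callbacks_lib
from keras.engine import sequential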
Line: 22
Column: 1
import os
from absl.testing import parameterized
import numpy as np
import keras
from keras import callbacks as callbacks_lib
from keras.engine import sequential
Reported by Pylint.
Line: 32
Column: 1
from keras.layers.preprocessing import string_lookup
from keras.optimizer_v2 import gradient_descent
from keras.utils import dataset_creator
from tensorflow.python.platform import tf_logging as logging
class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase):
"""The base class for DatasetCreator with Model.fit tests."""
Reported by Pylint.
Line: 73
Column: 22
                     steps_per_execution=1,
                     run_eagerly=False,
                     with_normalization_layer=False,
                     use_lookup_layer=False):
    class ResultAssertingCallback(callbacks_lib.Callback):
      """A callback that asserts the result of the tests."""
      def __init__(self):
Reported by Pylint.
Line: 78
Column: 7
class ResultAssertingCallback(callbacks_lib.Callback):
"""A callback that asserts the result of the tests."""
def __init__(self):
self._prev_epoch = -1
def on_epoch_end(self, epoch, logs=None):
logging.info("testModelFit: epoch=%r, logs=%r", epoch, logs)
if epoch <= self._prev_epoch:
Reported by Pylint.
Line: 99
Column: 7
    axis=-1, input_shape=(4, 4, 3), momentum=0.8)
model.add(norm)
model.add(core_layers.Dense(1, activation="sigmoid"))
self._accuracy_metric = keras.metrics.Accuracy()
model.compile(
    gradient_descent.SGD(),
    loss="binary_crossentropy",
    metrics=[self._accuracy_metric],
Reported by Pylint.
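Note: column 7 of line 99 lands on `self._accuracy_metric = keras.metrics.Accuracy()`, which is assigned inside a model-building helper rather than in `__init__`/`setUp`; a plausible, but unconfirmed, reading is attribute-defined-outside-init (W0201). The usual remedy is to pre-declare the attribute where the test case is set up; a hedged sketch (the helper name below is hypothetical):

class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase):

  def setUp(self):
    super().setUp()
    self._accuracy_metric = None  # real value assigned when the model is built

  def _build_and_compile_model(self):  # hypothetical helper name
    self._accuracy_metric = keras.metrics.Accuracy()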
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 35
Column: 1
from tensorflow.python.platform import tf_logging as logging
class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase):
"""The base class for DatasetCreator with Model.fit tests."""
def _get_dataset_fn(self, use_lookup_layer):
if use_lookup_layer:
Reported by Pylint.
Line: 36
Column: 1
class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase):
"""The base class for DatasetCreator with Model.fit tests."""
def _get_dataset_fn(self, use_lookup_layer):
if use_lookup_layer:
Reported by Pylint.
Line: 38
Column: 1
class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase):
"""The base class for DatasetCreator with Model.fit tests."""
def _get_dataset_fn(self, use_lookup_layer):
if use_lookup_layer:
filepath = os.path.join(self.get_temp_dir(), "vocab")
with open(filepath, "w") as f:
Reported by Pylint.
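Note: line 38 points into the vocabulary-file setup, where `open(filepath, "w")` is called without an explicit encoding. If the finding is Pylint's unspecified-encoding (W1514), which is only a guess here, the fix is a single keyword argument:

# Hedged sketch: name the encoding explicitly so behaviour does not depend on
# the platform's locale default.
filepath = os.path.join(self.get_temp_dir(), "vocab")
with open(filepath, "w", encoding="utf-8") as f:
  f.write("illustrative vocabulary contents")  # placeholder content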
keras/distribute/multi_worker_testing_utils.py
111 issues
Line: 17
Column: 1
# ==============================================================================
"""Utilities for testing multi-worker distribution strategies with Keras."""
import tensorflow.compat.v2 as tf
import threading
import unittest
import keras
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
Reported by Pylint.
Line: 22
Column: 1
import threading
import unittest
import keras
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras.optimizer_v2 import gradient_descent
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.server_lib import ClusterSpec
Reported by Pylint.
Line: 24
Column: 1
import keras
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras.optimizer_v2 import gradient_descent
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.server_lib import ClusterSpec
_portpicker_import_error = None
try:
Reported by Pylint.
Line: 25
Column: 1
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras.optimizer_v2 import gradient_descent
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.server_lib import ClusterSpec
_portpicker_import_error = None
try:
  import portpicker  # pylint: disable=g-import-not-at-top
Reported by Pylint.
Line: 30
Column: 1
_portpicker_import_error = None
try:
  import portpicker  # pylint: disable=g-import-not-at-top
except (ImportError, ModuleNotFoundError) as _error:  # pylint: disable=invalid-name
  _portpicker_import_error = _error
  portpicker = None
ASSIGNED_PORTS = set()
Reported by Pylint.
Line: 70
Column: 26
    32,
    kernel_size=(3, 3),
    activation="relu",
    kernel_initializer=keras.initializers.TruncatedNormal(seed=99))(inputs)
x = keras.layers.BatchNormalization()(x)
x = keras.layers.Flatten()(x) + keras.layers.Flatten()(x)
x = keras.layers.Dense(
    10,
    activation="softmax",
Reported by Pylint.
Line: 76
Column: 26
x = keras.layers.Dense(
    10,
    activation="softmax",
    kernel_initializer=keras.initializers.TruncatedNormal(seed=99))(x)
model = keras.Model(inputs=inputs, outputs=x)
# TODO(yuefengz): optimizer with slot variables doesn't work because of
# optimizer's bug.
# TODO(yuefengz): we should not allow non-v2 optimizer.
Reported by Pylint.
Line: 79
Column: 3
    kernel_initializer=keras.initializers.TruncatedNormal(seed=99))(x)
model = keras.Model(inputs=inputs, outputs=x)
# TODO(yuefengz): optimizer with slot variables doesn't work because of
# optimizer's bug.
# TODO(yuefengz): we should not allow non-v2 optimizer.
model.compile(
    loss=keras.losses.sparse_categorical_crossentropy,
    optimizer=gradient_descent.SGD(learning_rate=0.001),
Reported by Pylint.
Line: 81
Column: 3
# TODO(yuefengz): optimizer with slot variables doesn't work because of
# optimizer's bug.
# TODO(yuefengz): we should not allow non-v2 optimizer.
model.compile(
    loss=keras.losses.sparse_categorical_crossentropy,
    optimizer=gradient_descent.SGD(learning_rate=0.001),
    metrics=["accuracy"])
return model
Reported by Pylint.
Line: 100
Column: 3
if _portpicker_import_error:
  raise _portpicker_import_error  # pylint: disable=raising-bad-type
global ASSIGNED_PORTS
with lock:
  while True:
    try:
      port = portpicker.pick_unused_port()
    except portpicker.NoFreePortFoundError:
Reported by Pylint.
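Note: the flagged column points at `global ASSIGNED_PORTS`. Assuming the message is global-statement (W0603), the statement is unnecessary because the module-level set is only mutated in place with `.add()` and never rebound, so it can simply be dropped. A simplified sketch under that assumption:

ASSIGNED_PORTS = set()
lock = threading.Lock()

def pick_unused_port():
  if _portpicker_import_error:
    raise _portpicker_import_error  # pylint: disable=raising-bad-type
  with lock:  # no `global ASSIGNED_PORTS` statement needed
    port = portpicker.pick_unused_port()
    ASSIGNED_PORTS.add(port)  # in-place mutation, not a rebinding
    return port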
keras/regularizers_test.py
111 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras regularizers."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import regularizers
Reported by Pylint.
Line: 33
Column: 1
NUM_CLASSES = 2
class KerasRegularizersTest(keras_parameterized.TestCase,
                            parameterized.TestCase):
  def create_model(self, kernel_regularizer=None, activity_regularizer=None):
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(NUM_CLASSES,
Reported by Pylint.
Line: 36
Column: 1
class KerasRegularizersTest(keras_parameterized.TestCase,
                            parameterized.TestCase):
  def create_model(self, kernel_regularizer=None, activity_regularizer=None):
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(NUM_CLASSES,
                                 kernel_regularizer=kernel_regularizer,
                                 activity_regularizer=activity_regularizer,
                                 input_shape=(DATA_DIM,)))
Reported by Pylint.
Line: 36
Column: 3
class KerasRegularizersTest(keras_parameterized.TestCase,
                            parameterized.TestCase):
  def create_model(self, kernel_regularizer=None, activity_regularizer=None):
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(NUM_CLASSES,
                                 kernel_regularizer=kernel_regularizer,
                                 activity_regularizer=activity_regularizer,
                                 input_shape=(DATA_DIM,)))
Reported by Pylint.
Line: 37
Column: 1
                            parameterized.TestCase):
  def create_model(self, kernel_regularizer=None, activity_regularizer=None):
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(NUM_CLASSES,
                                 kernel_regularizer=kernel_regularizer,
                                 activity_regularizer=activity_regularizer,
                                 input_shape=(DATA_DIM,)))
    return model
Reported by Pylint.
Line: 38
Column: 1
def create_model(self, kernel_regularizer=None, activity_regularizer=None):
  model = keras.models.Sequential()
  model.add(keras.layers.Dense(NUM_CLASSES,
                               kernel_regularizer=kernel_regularizer,
                               activity_regularizer=activity_regularizer,
                               input_shape=(DATA_DIM,)))
  return model
Reported by Pylint.
Line: 42
Column: 1
                                 kernel_regularizer=kernel_regularizer,
                                 activity_regularizer=activity_regularizer,
                                 input_shape=(DATA_DIM,)))
    return model
  def get_data(self):
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=10,
        test_samples=10,
Reported by Pylint.
Line: 44
Column: 1
                                 input_shape=(DATA_DIM,)))
    return model
  def get_data(self):
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=10,
        test_samples=10,
        input_shape=(DATA_DIM,),
        num_classes=NUM_CLASSES)
Reported by Pylint.
keras/applications/xception.py
111 issues
Line: 26
Column: 1
https://arxiv.org/abs/1610.02357) (CVPR 2017)
"""
import tensorflow.compat.v2 as tf
from keras import backend
from keras.applications import imagenet_utils
from keras.engine import training
from keras.layers import VersionAwareLayers
Reported by Pylint.
Line: 34
Column: 1
from keras.layers import VersionAwareLayers
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.util.tf_export import keras_export
TF_WEIGHTS_PATH = (
    'https://storage.googleapis.com/tensorflow/keras-applications/'
    'xception/xception_weights_tf_dim_ordering_tf_kernels.h5')
Reported by Pylint.
Line: 49
Column: 1
@keras_export('keras.applications.xception.Xception',
              'keras.applications.Xception')
def Xception(
    include_top=True,
    weights='imagenet',
    input_tensor=None,
    input_shape=None,
    pooling=None,
Reported by Pylint.
Line: 57
Column: 1
    pooling=None,
    classes=1000,
    classifier_activation='softmax'):
  """Instantiates the Xception architecture.
  Reference:
  - [Xception: Deep Learning with Depthwise Separable Convolutions](
      https://arxiv.org/abs/1610.02357) (CVPR 2017)
Reported by Pylint.
Line: 116
Column: 1
  Returns:
    A `keras.Model` instance.
  """
  if not (weights in {'imagenet', None} or tf.io.gfile.exists(weights)):
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization), `imagenet` '
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')
Reported by Pylint.
Line: 117
Column: 1
    A `keras.Model` instance.
  """
  if not (weights in {'imagenet', None} or tf.io.gfile.exists(weights)):
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization), `imagenet` '
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')
  if weights == 'imagenet' and include_top and classes != 1000:
Reported by Pylint.
Line: 122
Column: 1
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')
  if weights == 'imagenet' and include_top and classes != 1000:
    raise ValueError('If using `weights` as `"imagenet"` with `include_top`'
                     ' as true, `classes` should be 1000')
  # Determine proper input shape
  input_shape = imagenet_utils.obtain_input_shape(
Reported by Pylint.
keras/layers/serialization_test.py
110 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for layer serialization utils."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import keras
from keras import combinations
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import keras
from keras import combinations
from keras.layers import recurrent as rnn_v1
from keras.layers import recurrent_v2 as rnn_v2
Reported by Pylint.
Line: 55
Column: 24
    keras.regularizers.L2)
if tf.__internal__.tf2.enabled():
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.OnesV2)
else:
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.Ones)
self.assertEqual(new_layer.units, 3)
Reported by Pylint.
Line: 58
Column: 24
      keras.initializers.OnesV2)
else:
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.Ones)
self.assertEqual(new_layer.units, 3)
def test_implicit_serialize_deserialize_fails_without_object(self):
  layer = keras.layers.Dense(
      SerializableInt(3),
Reported by Pylint.
Line: 90
Column: 24
    keras.regularizers.L2)
if tf.__internal__.tf2.enabled():
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.OnesV2)
else:
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.Ones)
self.assertEqual(new_layer.units.__class__, SerializableInt)
self.assertEqual(new_layer.units, 3)
Reported by Pylint.
Line: 93
Column: 24
      keras.initializers.OnesV2)
else:
  self.assertEqual(new_layer.kernel_initializer.__class__,
                   keras.initializers.Ones)
self.assertEqual(new_layer.units.__class__, SerializableInt)
self.assertEqual(new_layer.units, 3)
@parameterized.parameters(
    [batchnorm_v1.BatchNormalization, batchnorm_v2.BatchNormalization])
Reported by Pylint.
Line: 109
Column: 24
if tf.__internal__.tf2.enabled():
  self.assertIsInstance(new_layer, batchnorm_v2.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.ZerosV2)
else:
  self.assertIsInstance(new_layer, batchnorm_v1.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.Zeros)
self.assertEqual(new_layer.gamma_regularizer.__class__,
Reported by Pylint.
Line: 113
Column: 24
else:
  self.assertIsInstance(new_layer, batchnorm_v1.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.Zeros)
self.assertEqual(new_layer.gamma_regularizer.__class__,
                 keras.regularizers.L2)
@parameterized.parameters(
    [batchnorm_v1.BatchNormalization, batchnorm_v2.BatchNormalization])
Reported by Pylint.
Line: 128
Column: 24
if tf.__internal__.tf2.enabled():
  self.assertIsInstance(new_layer, batchnorm_v2.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.ZerosV2)
else:
  self.assertIsInstance(new_layer, batchnorm_v1.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.Zeros)
self.assertEqual(new_layer.gamma_regularizer.__class__,
Reported by Pylint.
Line: 132
Column: 24
else:
  self.assertIsInstance(new_layer, batchnorm_v1.BatchNormalization)
  self.assertEqual(new_layer.beta_initializer.__class__,
                   keras.initializers.Zeros)
self.assertEqual(new_layer.gamma_regularizer.__class__,
                 keras.regularizers.L2)
@parameterized.parameters([rnn_v1.LSTM, rnn_v2.LSTM])
def test_serialize_deserialize_lstm(self, layer):
Reported by Pylint.
keras/engine/base_preprocessing_layer.py
109 issues
Line: 22
Column: 1
from keras.engine import data_adapter
from keras.engine.base_layer import Layer
from keras.utils import version_utils
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
Reported by Pylint.
Line: 23
Column: 1
from keras.engine.base_layer import Layer
from keras.utils import version_utils
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
Reported by Pylint.
Line: 24
Column: 1
from keras.utils import version_utils
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
keras_kpl_gauge = tf.__internal__.monitoring.BoolGauge(
Reported by Pylint.
Line: 25
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
keras_kpl_gauge = tf.__internal__.monitoring.BoolGauge(
'/tensorflow/api/keras/layers/preprocessing',
Reported by Pylint.
Line: 26
Column: 1
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
keras_kpl_gauge = tf.__internal__.monitoring.BoolGauge(
'/tensorflow/api/keras/layers/preprocessing',
'keras preprocessing layers usage', 'method')
Reported by Pylint.
Line: 228
Column: 1
"""
_disallow_inside_tf_function('adapt')
if not version_utils.should_use_v2():
  raise RuntimeError('`adapt` is only supported in tensorflow v2.')  # pylint: disable=g-doc-exception
if not self._is_compiled:
  self.compile()  # Compile with defaults.
if self.built:
  self.reset_state()
data_handler = data_adapter.DataHandler(
Reported by Pylint.
Line: 90
Column: 5
    preprocessing layer's state. This method handles any one-time operations
    that should occur on the layer's state before `Layer.__call__`.
    """
    pass
  @doc_controls.do_not_generate_docs
  def make_adapt_function(self):
    """Creates a function to execute one step of `adapt`.
Reported by Pylint.
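Note: the `pass` at line 90 follows a docstring in an otherwise empty hook method, the textbook trigger for unnecessary-pass (W0107). Assuming that is the message, the docstring alone is a sufficient body; a hedged sketch (the method name below is a placeholder, since it is not visible in the excerpt):

def finalize_hook(self):  # placeholder name for the hook shown above
  """Handles one-time operations on the layer's state before `Layer.__call__`."""
  # No `pass` needed: the docstring already constitutes the function body.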
Line: 152
Column: 5
    if run_eagerly is None:
      run_eagerly = self.dynamic
    self._run_eagerly = run_eagerly
    self._is_compiled = True
  def adapt(self, data, batch_size=None, steps=None):
    """Fits the state of the preprocessing layer to the data being passed.
Reported by Pylint.
Line: 257
Column: 5
@tf.__internal__.tracking.no_automatic_dependency_tracking
def _configure_steps_per_execution(self, steps_per_execution):
  self._steps_per_execution = tf.Variable(
      steps_per_execution,
      dtype='int64',
      aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA)
# TODO(omalleyt): Unify this logic with `Layer._maybe_build`.
Reported by Pylint.
Line: 262
Column: 3
      dtype='int64',
      aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA)
# TODO(omalleyt): Unify this logic with `Layer._maybe_build`.
def _adapt_maybe_build(self, data):
  if not self.built:
    try:
      # If this is a Numpy array or tensor, we can get shape from .shape.
      # If not, an attribute error will be thrown.
Reported by Pylint.
keras/optimizer_v2/rmsprop.py
109 issues
Line: 17
Column: 1
# ==============================================================================
"""RMSprop optimizer implementation."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
Reported by Pylint.
Line: 18
Column: 1
"""RMSprop optimizer implementation."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 21
Column: 1
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.optimizers.RMSprop")
Reported by Pylint.
Line: 22
Column: 1
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.optimizers.RMSprop")
class RMSprop(optimizer_v2.OptimizerV2):
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.optimizers.RMSprop")
class RMSprop(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the RMSprop algorithm.
Reported by Pylint.
Line: 28
Column: 1
@keras_export("keras.optimizers.RMSprop")
class RMSprop(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the RMSprop algorithm.
The gist of RMSprop is to:
- Maintain a moving (discounted) average of the square of gradients
- Divide the gradient by the root of this average
Reported by Pylint.
Line: 88
Column: 1
    http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
  """
  _HAS_AGGREGATE_GRAD = True
  def __init__(self,
               learning_rate=0.001,
               rho=0.9,
               momentum=0.0,
Reported by Pylint.
Line: 90
Column: 3
_HAS_AGGREGATE_GRAD = True
def __init__(self,
             learning_rate=0.001,
             rho=0.9,
             momentum=0.0,
             epsilon=1e-7,
             centered=False,
Reported by Pylint.
Line: 90
Column: 1
_HAS_AGGREGATE_GRAD = True
def __init__(self,
             learning_rate=0.001,
             rho=0.9,
             momentum=0.0,
             epsilon=1e-7,
             centered=False,
Reported by Pylint.
Line: 98
Column: 1
               centered=False,
               name="RMSprop",
               **kwargs):
    """Construct a new RMSprop optimizer.
    Args:
      learning_rate: A `Tensor`, floating point value, or a schedule that is a
        `tf.keras.optimizers.schedules.LearningRateSchedule`, or a callable
        that takes no arguments and returns the actual value to use. The
Reported by Pylint.
keras/distribute/distributed_file_utils_test.py
109 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for distributed_file_utils."""
import tensorflow.compat.v2 as tf
import os
from keras.distribute import distributed_file_utils
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import os
from keras.distribute import distributed_file_utils
class DistributedFileUtilsTest(tf.test.TestCase):
Reported by Pylint.
Line: 24
Column: 1
from keras.distribute import distributed_file_utils
class DistributedFileUtilsTest(tf.test.TestCase):
  class MockedExtended:
    pass
  class MockedChiefStrategy:
Reported by Pylint.
Line: 26
Column: 3
class DistributedFileUtilsTest(tf.test.TestCase):
  class MockedExtended:
    pass
  class MockedChiefStrategy:
    def __init__(self):
Reported by Pylint.
Line: 26
Column: 1
class DistributedFileUtilsTest(tf.test.TestCase):
  class MockedExtended:
    pass
  class MockedChiefStrategy:
    def __init__(self):
Reported by Pylint.
Line: 27
Column: 1
class DistributedFileUtilsTest(tf.test.TestCase):
  class MockedExtended:
    pass
  class MockedChiefStrategy:
    def __init__(self):
      self.extended = DistributedFileUtilsTest.MockedExtended()
Reported by Pylint.
Line: 29
Column: 3
class MockedExtended:
  pass
class MockedChiefStrategy:
  def __init__(self):
    self.extended = DistributedFileUtilsTest.MockedExtended()
    self.extended._in_multi_worker_mode = lambda: True
    self.extended.should_checkpoint = True
Reported by Pylint.
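Note: the mocked strategy pokes at `extended._in_multi_worker_mode`, a protected member, so protected-access (W0212) is a likely, though unconfirmed, reading of the findings at line 29. In a test double the standard remedy is a narrowly scoped disable rather than restructuring the mock:

class MockedChiefStrategy:
  def __init__(self):
    self.extended = DistributedFileUtilsTest.MockedExtended()
    # Deliberate in a test double; scope the disable to this single line.
    self.extended._in_multi_worker_mode = lambda: True  # pylint: disable=protected-access
    self.extended.should_checkpoint = True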
Line: 29
Column: 1
class MockedExtended:
  pass
class MockedChiefStrategy:
  def __init__(self):
    self.extended = DistributedFileUtilsTest.MockedExtended()
    self.extended._in_multi_worker_mode = lambda: True
    self.extended.should_checkpoint = True
Reported by Pylint.
keras/engine/training_arrays_test.py
108 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for model.fit calls with a Dataset object passed as validation_data."""
import tensorflow.compat.v2 as tf
import io
import sys
from absl.testing import parameterized
Reported by Pylint.
Line: 22
Column: 1
import io
import sys
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util
from keras import keras_parameterized
Reported by Pylint.
Line: 26
Column: 1
import numpy as np
import keras
from tensorflow.python.framework import test_util
from keras import keras_parameterized
from keras import testing_utils
from keras.layers import core
Reported by Pylint.
Line: 113
Column: 5
@keras_parameterized.run_all_keras_modes
def test_dict_float64_input(self):
  class MyModel(keras.Model):
    def __init__(self):
      super(MyModel, self).__init__(self)
      self.dense1 = keras.layers.Dense(10, activation="relu")
      self.dense2 = keras.layers.Dense(10, activation="relu")
Reported by Pylint.
Line: 122
Column: 7
  self.concat = keras.layers.Concatenate()
  self.dense3 = keras.layers.Dense(1, activation="sigmoid")
def call(self, inputs):
  d1 = self.dense1(inputs["one"])
  d2 = self.dense2(inputs["two"])
  concat = self.concat([d1, d2])
  return self.dense3(concat)
Reported by Pylint.
Line: 153
Column: 5
input_0 = keras.Input(shape=(None,), name="input_0")
input_1 = keras.Input(shape=(None,), name="input_1")
class my_model(keras.Model):
  def __init__(self):
    super(my_model, self).__init__(self)
    self.hidden_layer_0 = keras.layers.Dense(100, activation="relu")
    self.hidden_layer_1 = keras.layers.Dense(100, activation="relu")
Reported by Pylint.
Line: 162
Column: 7
  self.concat = keras.layers.Concatenate()
  self.out_layer = keras.layers.Dense(1, activation="sigmoid")
def call(self, inputs=[input_0, input_1]):
  activation_0 = self.hidden_layer_0(inputs["input_0"])
  activation_1 = self.hidden_layer_1(inputs["input_1"])
  concat = self.concat([activation_0, activation_1])
  return self.out_layer(concat)
Reported by Pylint.
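Note: `call` takes a mutable list of Keras tensors as its default argument, which matches Pylint's dangerous-default-value (W0102); that mapping is an assumption, but the conventional rewrite is the same either way, since `Model.call` normally receives its inputs explicitly:

def call(self, inputs):  # no mutable default argument needed
  activation_0 = self.hidden_layer_0(inputs["input_0"])
  activation_1 = self.hidden_layer_1(inputs["input_1"])
  concat = self.concat([activation_0, activation_1])
  return self.out_layer(concat)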
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import io
import sys
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import io
import sys
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.