The following issues were found:
keras/tests/model_architectures_test.py
62 issues
Line: 18
Column: 1
# pylint: disable=protected-access
"""Tests for saving/loading function for keras Model."""
import tensorflow.compat.v2 as tf
import os
import shutil
from absl.testing import parameterized
Reported by Pylint.
Line: 23
Column: 1
import os
import shutil
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import optimizer_v1
Reported by Pylint.
Line: 26
Column: 1
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures
Reported by Pylint.
Line: 27
Column: 1
import numpy as np
import keras
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures
Reported by Pylint.
Line: 28
Column: 1
import keras
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures
@keras_parameterized.run_with_all_saved_model_formats
Reported by Pylint.
Line: 29
Column: 1
import keras
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures
@keras_parameterized.run_with_all_saved_model_formats
class TestModelArchitectures(keras_parameterized.TestCase):
Reported by Pylint.
Line: 30
Column: 1
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures
@keras_parameterized.run_with_all_saved_model_formats
class TestModelArchitectures(keras_parameterized.TestCase):
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import os
import shutil
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import tensorflow.compat.v2 as tf
import os
import shutil
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 34
Column: 1
@keras_parameterized.run_with_all_saved_model_formats
class TestModelArchitectures(keras_parameterized.TestCase):
def _save_model_dir(self, dirname='saved_model'):
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
return os.path.join(temp_dir, dirname)
Reported by Pylint.
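A note on the recurring pattern in this file: every excerpt shows "import tensorflow.compat.v2 as tf" placed above the standard-library imports, which is what Pylint's import-ordering conventions flag. The report omits the message IDs, so the sketch below only illustrates a grouping (standard library, then third-party, then first-party) that would typically satisfy such checks; treat it as an assumption, not the recorded fix.

# Illustrative ordering only: stdlib first, then third-party, then first-party.
import os
import shutil

from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf

import keras
from keras import keras_parameterized
from keras import optimizer_v1
from keras import testing_utils
from keras.tests import model_architectures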
keras/layers/serialization.py
62 issues
Line: 17
Column: 1
# ==============================================================================
"""Layer serialization/deserialization functions."""
import tensorflow.compat.v2 as tf
# pylint: disable=wildcard-import
# pylint: disable=unused-import
import threading
from keras.engine import base_layer
Reported by Pylint.
Line: 56
Column: 1
from keras.layers.preprocessing import text_vectorization
from keras.utils import generic_utils
from keras.utils import tf_inspect as inspect
from tensorflow.python.util.tf_export import keras_export
ALL_MODULES = (base_layer, input_layer, advanced_activations, convolutional,
convolutional_recurrent, core, cudnn_recurrent, dense_attention,
embeddings, einsum_dense, local, merge, noise,
batch_normalization_v1, layer_normalization, pooling,
Reported by Pylint.
Line: 112
Column: 1
'BatchNormalizationV2'] = batch_normalization.BatchNormalization
# Prevent circular dependencies.
from keras import models # pylint: disable=g-import-not-at-top
from keras.premade.linear import LinearModel # pylint: disable=g-import-not-at-top
from keras.premade.wide_deep import WideDeepModel # pylint: disable=g-import-not-at-top
from keras.feature_column.sequence_feature_column import SequenceFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
Reported by Pylint.
Line: 113
Column: 1
# Prevent circular dependencies.
from keras import models # pylint: disable=g-import-not-at-top
from keras.premade.linear import LinearModel # pylint: disable=g-import-not-at-top
from keras.premade.wide_deep import WideDeepModel # pylint: disable=g-import-not-at-top
from keras.feature_column.sequence_feature_column import SequenceFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
Reported by Pylint.
Line: 114
Column: 1
# Prevent circular dependencies.
from keras import models # pylint: disable=g-import-not-at-top
from keras.premade.linear import LinearModel # pylint: disable=g-import-not-at-top
from keras.premade.wide_deep import WideDeepModel # pylint: disable=g-import-not-at-top
from keras.feature_column.sequence_feature_column import SequenceFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
LOCAL.ALL_OBJECTS['Functional'] = models.Functional
Reported by Pylint.
Line: 115
Column: 1
from keras import models # pylint: disable=g-import-not-at-top
from keras.premade.linear import LinearModel # pylint: disable=g-import-not-at-top
from keras.premade.wide_deep import WideDeepModel # pylint: disable=g-import-not-at-top
from keras.feature_column.sequence_feature_column import SequenceFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
LOCAL.ALL_OBJECTS['Functional'] = models.Functional
LOCAL.ALL_OBJECTS['Model'] = models.Model
Reported by Pylint.
Line: 127
Column: 1
LOCAL.ALL_OBJECTS['WideDeepModel'] = WideDeepModel
if tf.__internal__.tf2.enabled():
from keras.feature_column.dense_features_v2 import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
else:
from keras.feature_column.dense_features import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
Reported by Pylint.
Line: 130
Column: 1
from keras.feature_column.dense_features_v2 import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
else:
from keras.feature_column.dense_features import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
# Merge layers, function versions.
LOCAL.ALL_OBJECTS['add'] = merge.add
LOCAL.ALL_OBJECTS['subtract'] = merge.subtract
Reported by Pylint.
Line: 75
Column: 3
def populate_deserializable_objects():
"""Populates dict ALL_OBJECTS with every built-in layer."""
global LOCAL
if not hasattr(LOCAL, 'ALL_OBJECTS'):
LOCAL.ALL_OBJECTS = {}
LOCAL.GENERATED_WITH_V2 = None
if LOCAL.ALL_OBJECTS and LOCAL.GENERATED_WITH_V2 == tf.__internal__.tf2.enabled(
Reported by Pylint.
Line: 21
Column: 1
# pylint: disable=wildcard-import
# pylint: disable=unused-import
import threading
from keras.engine import base_layer
from keras.engine import input_layer
from keras.engine import input_spec
from keras.layers import advanced_activations
from keras.layers import convolutional
Reported by Pylint.
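The serialization.py excerpts combine two related things: imports placed mid-file with # pylint: disable=g-import-not-at-top to break circular dependencies, and a thread-local registry filled lazily by populate_deserializable_objects(). The sketch below shows that deferred-import pattern in isolation; the registry layout and the populate_registry helper are illustrative stand-ins, not Keras APIs.

import threading

_local = threading.local()

def populate_registry():
    """Fills a per-thread registry on first use, deferring the heavy import."""
    if getattr(_local, 'registry', None):
        return
    # Importing here rather than at module top level avoids a circular
    # dependency, which is why the real code carries the g-import-not-at-top
    # pragma on these lines.
    from keras import models  # pylint: disable=g-import-not-at-top
    _local.registry = {'Model': models.Model, 'Functional': models.Functional}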
keras/utils/kernelized_utils_test.py
62 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for kernelized_utils.py."""
import tensorflow.compat.v2 as tf
import functools
from absl.testing import parameterized
from keras.utils import kernelized_utils
Reported by Pylint.
Line: 21
Column: 1
import functools
from absl.testing import parameterized
from keras.utils import kernelized_utils
def _exact_gaussian(stddev):
return functools.partial(
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import functools
from absl.testing import parameterized
from keras.utils import kernelized_utils
Reported by Pylint.
Line: 26
Column: 1
def _exact_gaussian(stddev):
return functools.partial(
kernelized_utils.exact_gaussian_kernel, stddev=stddev)
def _exact_laplacian(stddev):
return functools.partial(
Reported by Pylint.
Line: 31
Column: 1
def _exact_laplacian(stddev):
return functools.partial(
kernelized_utils.exact_laplacian_kernel, stddev=stddev)
class KernelizedUtilsTest(tf.test.TestCase, parameterized.TestCase):
Reported by Pylint.
Line: 35
Column: 1
kernelized_utils.exact_laplacian_kernel, stddev=stddev)
class KernelizedUtilsTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
Reported by Pylint.
Line: 37
Column: 1
class KernelizedUtilsTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
"""Identical vectors give exactly the identity kernel value."""
x = tf.constant([0.5, -0.5, -0.5, 0.5])
Reported by Pylint.
Line: 40
Column: 1
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
"""Identical vectors give exactly the identity kernel value."""
x = tf.constant([0.5, -0.5, -0.5, 0.5])
y = tf.constant([0.5, -0.5, -0.5, 0.5])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
Reported by Pylint.
Line: 41
Column: 1
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
"""Identical vectors give exactly the identity kernel value."""
x = tf.constant([0.5, -0.5, -0.5, 0.5])
y = tf.constant([0.5, -0.5, -0.5, 0.5])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
Reported by Pylint.
Line: 42
Column: 1
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
"""Identical vectors give exactly the identity kernel value."""
x = tf.constant([0.5, -0.5, -0.5, 0.5])
y = tf.constant([0.5, -0.5, -0.5, 0.5])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
# x and y are identical and therefore K(x, y) will be precisely equal to
Reported by Pylint.
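The test excerpted above asserts that identical vectors yield the identity kernel value. For a Gaussian RBF kernel of the form exp(-||x - y||^2 / (2 * stddev^2)), the exponent is zero when x equals y, so the result is exactly 1.0 regardless of stddev. A small NumPy sketch of that property (an illustration, not kernelized_utils itself):

import numpy as np

def gaussian_kernel(x, y, stddev):
    # Standard RBF form; used here only to illustrate the identity property.
    return np.exp(-np.sum((x - y) ** 2) / (2.0 * stddev ** 2))

x = np.array([0.5, -0.5, -0.5, 0.5])
print(gaussian_kernel(x, x, stddev=10.0))  # 1.0, since ||x - x|| == 0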
keras/mixed_precision/device_compatibility_check.py
61 issues
Line: 17
Column: 1
# ==============================================================================
"""Contains function to log if devices are compatible with mixed precision."""
import tensorflow.compat.v2 as tf
import itertools
from tensorflow.python.platform import tf_logging
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import itertools
from tensorflow.python.platform import tf_logging
_COMPAT_CHECK_PREFIX = 'Mixed precision compatibility check (mixed_float16): '
_COMPAT_CHECK_OK_PREFIX = _COMPAT_CHECK_PREFIX + 'OK'
_COMPAT_CHECK_WARNING_PREFIX = _COMPAT_CHECK_PREFIX + 'WARNING'
Reported by Pylint.
Line: 69
Column: 3
`tf.config.experimental.get_device_details()`.
"""
if policy_name != 'mixed_float16':
# TODO(b/145686977): Log if the policy is 'mixed_bfloat16'. This requires
# checking if a TPU is available.
return
supported_device_strs = []
unsupported_device_strs = []
for details in gpu_details_list:
Reported by Pylint.
Line: 141
Column: 3
Args:
policy_name: The name of the dtype policy.
"""
global _logged_compatibility_check
if _logged_compatibility_check:
return
_logged_compatibility_check = True
gpus = tf.config.list_physical_devices('GPU')
gpu_details_list = [tf.config.experimental.get_device_details(g) for g in gpus]
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import itertools
from tensorflow.python.platform import tf_logging
_COMPAT_CHECK_PREFIX = 'Mixed precision compatibility check (mixed_float16): '
_COMPAT_CHECK_OK_PREFIX = _COMPAT_CHECK_PREFIX + 'OK'
Reported by Pylint.
Line: 33
Column: 1
def _dedup_strings(device_strs):
"""Groups together consecutive identical strings.
For example, given:
['GPU 1', 'GPU 2', 'GPU 2', 'GPU 3', 'GPU 3', 'GPU 3']
This function returns:
['GPU 1', 'GPU 2 (x2)', 'GPU 3 (x3)']
Reported by Pylint.
Line: 47
Column: 1
A copy of the input, but identical consecutive strings are merged into a
single string.
"""
new_device_strs = []
for device_str, vals in itertools.groupby(device_strs):
num = len(list(vals))
if num == 1:
new_device_strs.append(device_str)
else:
Reported by Pylint.
Line: 48
Column: 1
single string.
"""
new_device_strs = []
for device_str, vals in itertools.groupby(device_strs):
num = len(list(vals))
if num == 1:
new_device_strs.append(device_str)
else:
new_device_strs.append('%s (x%d)' % (device_str, num))
Reported by Pylint.
Line: 49
Column: 1
"""
new_device_strs = []
for device_str, vals in itertools.groupby(device_strs):
num = len(list(vals))
if num == 1:
new_device_strs.append(device_str)
else:
new_device_strs.append('%s (x%d)' % (device_str, num))
return new_device_strs
Reported by Pylint.
Line: 50
Column: 1
new_device_strs = []
for device_str, vals in itertools.groupby(device_strs):
num = len(list(vals))
if num == 1:
new_device_strs.append(device_str)
else:
new_device_strs.append('%s (x%d)' % (device_str, num))
return new_device_strs
Reported by Pylint.
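The _dedup_strings excerpts collapse consecutive identical device strings with itertools.groupby. The docstring's own example can be reproduced with a short standalone sketch that mirrors the excerpted logic without importing the Keras module:

import itertools

def dedup_strings(device_strs):
    # Groups consecutive identical strings and annotates repeat counts.
    new_device_strs = []
    for device_str, vals in itertools.groupby(device_strs):
        num = len(list(vals))
        if num == 1:
            new_device_strs.append(device_str)
        else:
            new_device_strs.append('%s (x%d)' % (device_str, num))
    return new_device_strs

print(dedup_strings(['GPU 1', 'GPU 2', 'GPU 2', 'GPU 3', 'GPU 3', 'GPU 3']))
# ['GPU 1', 'GPU 2 (x2)', 'GPU 3 (x3)']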
keras/distribute/custom_training_loop_optimizer_test.py
60 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for custom training loops that involves advanced optimizer usage."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute import values
from keras.distribute import strategy_combinations as keras_strategy_combinations
from keras.optimizer_v2 import gradient_descent
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute import values
from keras.distribute import strategy_combinations as keras_strategy_combinations
from keras.optimizer_v2 import gradient_descent
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute import values
from keras.distribute import strategy_combinations as keras_strategy_combinations
from keras.optimizer_v2 import gradient_descent
class OptimizerTest(tf.test.TestCase, parameterized.TestCase):
Reported by Pylint.
Line: 25
Column: 1
from keras.optimizer_v2 import gradient_descent
class OptimizerTest(tf.test.TestCase, parameterized.TestCase):
@tf.__internal__.distribute.combinations.generate(
tf.__internal__.test.combinations.times(
tf.__internal__.test.combinations.combine(
distribution=keras_strategy_combinations.multidevice_strategies,
Reported by Pylint.
Line: 27
Column: 1
class OptimizerTest(tf.test.TestCase, parameterized.TestCase):
@tf.__internal__.distribute.combinations.generate(
tf.__internal__.test.combinations.times(
tf.__internal__.test.combinations.combine(
distribution=keras_strategy_combinations.multidevice_strategies,
mode=["eager"],
),
Reported by Pylint.
Line: 39
Column: 3
tf.__internal__.test.combinations.combine(
experimental_aggregate_gradients=False,
expected=[[[-0.1, -0.1], [-0.2, -0.2]]])
))
def test_custom_aggregation(self, distribution,
experimental_aggregate_gradients, expected):
with distribution.scope():
v = tf.Variable([0., 0.])
Reported by Pylint.
Line: 40
Column: 1
experimental_aggregate_gradients=False,
expected=[[[-0.1, -0.1], [-0.2, -0.2]]])
))
def test_custom_aggregation(self, distribution,
experimental_aggregate_gradients, expected):
with distribution.scope():
v = tf.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
Reported by Pylint.
Line: 43
Column: 1
def test_custom_aggregation(self, distribution,
experimental_aggregate_gradients, expected):
with distribution.scope():
v = tf.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
class PerReplica(values.DistributedValues):
"""Holds a map from replica to unsynchronized values."""
Reported by Pylint.
Line: 44
Column: 1
experimental_aggregate_gradients, expected):
with distribution.scope():
v = tf.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
class PerReplica(values.DistributedValues):
"""Holds a map from replica to unsynchronized values."""
Reported by Pylint.
Line: 44
Column: 7
experimental_aggregate_gradients, expected):
with distribution.scope():
v = tf.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
class PerReplica(values.DistributedValues):
"""Holds a map from replica to unsynchronized values."""
Reported by Pylint.
keras/layers/einsum_dense_test.py
60 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras-based einsum dense layer."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 25
Column: 1
import keras
from keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import
from keras import testing_utils
from keras.layers import einsum_dense
@keras_parameterized.run_all_keras_modes
Reported by Pylint.
Line: 263
Column: 29
"equation": "...b,bc->...c",
"bias_axes": "c",
"output_shape": 4,
"bias_initializer": keras.initializers.constant(0.03),
"kernel_initializer": keras.initializers.constant(0.5),
"dtype": input_data.dtype
}
expected_output = np.array([[1.53, 1.53, 1.53, 1.53],
[3.53, 3.53, 3.53, 3.53]])
Reported by Pylint.
Line: 264
Column: 31
"bias_axes": "c",
"output_shape": 4,
"bias_initializer": keras.initializers.constant(0.03),
"kernel_initializer": keras.initializers.constant(0.5),
"dtype": input_data.dtype
}
expected_output = np.array([[1.53, 1.53, 1.53, 1.53],
[3.53, 3.53, 3.53, 3.53]])
Reported by Pylint.
Line: 229
Column: 35
expected_output_shape):
del expected_output_shape # Not used in this test.
weight_shape, bias_shape, _ = einsum_dense._analyze_einsum_string(
equation, bias_axes, input_shape, output_shape)
self.assertAllEqual(expected_weight_shape, weight_shape)
self.assertAllEqual(expected_bias_shape, bias_shape)
Reported by Pylint.
Line: 222
Column: 1
"expected_bias_shape": [3, 4, 1],
"expected_output_shape": (None, 3, 4, 2)
})
class TestEinsumDenseLayer(keras_parameterized.TestCase):
def test_weight_shapes(self, equation, bias_axes, input_shape, output_shape,
expected_weight_shape, expected_bias_shape,
expected_output_shape):
del expected_output_shape # Not used in this test.
Reported by Pylint.
Line: 224
Column: 3
})
class TestEinsumDenseLayer(keras_parameterized.TestCase):
def test_weight_shapes(self, equation, bias_axes, input_shape, output_shape,
expected_weight_shape, expected_bias_shape,
expected_output_shape):
del expected_output_shape # Not used in this test.
weight_shape, bias_shape, _ = einsum_dense._analyze_einsum_string(
Reported by Pylint.
Line: 224
Column: 3
})
class TestEinsumDenseLayer(keras_parameterized.TestCase):
def test_weight_shapes(self, equation, bias_axes, input_shape, output_shape,
expected_weight_shape, expected_bias_shape,
expected_output_shape):
del expected_output_shape # Not used in this test.
weight_shape, bias_shape, _ = einsum_dense._analyze_einsum_string(
Reported by Pylint.
Line: 224
Column: 1
})
class TestEinsumDenseLayer(keras_parameterized.TestCase):
def test_weight_shapes(self, equation, bias_axes, input_shape, output_shape,
expected_weight_shape, expected_bias_shape,
expected_output_shape):
del expected_output_shape # Not used in this test.
weight_shape, bias_shape, _ = einsum_dense._analyze_einsum_string(
Reported by Pylint.
keras/optimizer_v2/adagrad.py
59 issues
Line: 17
Column: 1
# ==============================================================================
"""Adagrad optimizer implementation."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
Reported by Pylint.
Line: 18
Column: 1
"""Adagrad optimizer implementation."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 21
Column: 1
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Adagrad')
Reported by Pylint.
Line: 22
Column: 1
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Adagrad')
class Adagrad(optimizer_v2.OptimizerV2):
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
from keras import backend_config
from keras.optimizer_v2 import optimizer_v2
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Adagrad')
class Adagrad(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the Adagrad algorithm.
Reported by Pylint.
Line: 106
Column: 32
super(Adagrad, self).set_weights(weights)
@classmethod
def from_config(cls, config, custom_objects=None):
"""Creates an optimizer from its config.
This method is the reverse of `get_config`,
capable of instantiating the same optimizer from the config
dictionary.
Reported by Pylint.
Line: 28
Column: 1
@keras_export('keras.optimizers.Adagrad')
class Adagrad(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the Adagrad algorithm.
Adagrad is an optimizer with parameter-specific learning rates,
which are adapted relative to how frequently a parameter gets
updated during training. The more updates a parameter receives,
the smaller the updates.
Reported by Pylint.
Line: 61
Column: 1
http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf).
"""
_HAS_AGGREGATE_GRAD = True
def __init__(self,
learning_rate=0.001,
initial_accumulator_value=0.1,
epsilon=1e-7,
Reported by Pylint.
Line: 63
Column: 1
_HAS_AGGREGATE_GRAD = True
def __init__(self,
learning_rate=0.001,
initial_accumulator_value=0.1,
epsilon=1e-7,
name='Adagrad',
**kwargs):
Reported by Pylint.
Line: 69
Column: 1
epsilon=1e-7,
name='Adagrad',
**kwargs):
if initial_accumulator_value < 0.0:
raise ValueError('initial_accumulator_value must be non-negative: %s' %
initial_accumulator_value)
if epsilon is None:
epsilon = backend_config.epsilon()
super(Adagrad, self).__init__(name, **kwargs)
Reported by Pylint.
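The Adagrad excerpts show the constructor arguments and the guard that rejects a negative initial_accumulator_value. A hedged usage sketch through the public tf.keras.optimizers.Adagrad API, assuming the TF 2.x optimizer_v2 behaviour excerpted above:

import tensorflow as tf

# Constructor arguments as shown in the excerpt; epsilon falls back to the
# backend default when None.
opt = tf.keras.optimizers.Adagrad(
    learning_rate=0.001,
    initial_accumulator_value=0.1,
    epsilon=1e-7)

# The guard in __init__ above rejects negative accumulator values.
try:
    tf.keras.optimizers.Adagrad(initial_accumulator_value=-1.0)
except ValueError as err:
    print(err)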
keras/datasets/imdb.py
58 issues
Line: 23
Column: 1
from keras.preprocessing.sequence import _remove_long_seq
from keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.datasets.imdb.load_data')
def load_data(path='imdb.npz',
Reported by Pylint.
Line: 24
Column: 1
from keras.preprocessing.sequence import _remove_long_seq
from keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.datasets.imdb.load_data')
def load_data(path='imdb.npz',
num_words=None,
Reported by Pylint.
Line: 112
Column: 9
x_train, labels_train = f['x_train'], f['y_train']
x_test, labels_test = f['x_test'], f['y_test']
rng = np.random.RandomState(seed)
indices = np.arange(len(x_train))
rng.shuffle(indices)
x_train = x_train[indices]
labels_train = labels_train[indices]
Reported by Pylint.
Line: 28
Column: 1
@keras_export('keras.datasets.imdb.load_data')
def load_data(path='imdb.npz',
num_words=None,
skip_top=0,
maxlen=None,
seed=113,
start_char=1,
Reported by Pylint.
Line: 28
Column: 1
@keras_export('keras.datasets.imdb.load_data')
def load_data(path='imdb.npz',
num_words=None,
skip_top=0,
maxlen=None,
seed=113,
start_char=1,
Reported by Pylint.
Line: 37
Column: 1
oov_char=2,
index_from=3,
**kwargs):
"""Loads the [IMDB dataset](https://ai.stanford.edu/~amaas/data/sentiment/).
This is a dataset of 25,000 movies reviews from IMDB, labeled by sentiment
(positive/negative). Reviews have been preprocessed, and each review is
encoded as a list of word indexes (integers).
For convenience, words are indexed by overall frequency in the dataset,
Reported by Pylint.
Line: 95
Column: 1
have simply been skipped.
"""
# Legacy support
if 'nb_words' in kwargs:
logging.warning('The `nb_words` argument in `load_data` '
'has been renamed `num_words`.')
num_words = kwargs.pop('nb_words')
if kwargs:
raise TypeError(f'Unrecognized keyword arguments: {str(kwargs)}.')
Reported by Pylint.
Line: 96
Column: 1
"""
# Legacy support
if 'nb_words' in kwargs:
logging.warning('The `nb_words` argument in `load_data` '
'has been renamed `num_words`.')
num_words = kwargs.pop('nb_words')
if kwargs:
raise TypeError(f'Unrecognized keyword arguments: {str(kwargs)}.')
Reported by Pylint.
Line: 98
Column: 1
if 'nb_words' in kwargs:
logging.warning('The `nb_words` argument in `load_data` '
'has been renamed `num_words`.')
num_words = kwargs.pop('nb_words')
if kwargs:
raise TypeError(f'Unrecognized keyword arguments: {str(kwargs)}.')
origin_folder = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
path = get_file(
Reported by Pylint.
Line: 99
Column: 1
logging.warning('The `nb_words` argument in `load_data` '
'has been renamed `num_words`.')
num_words = kwargs.pop('nb_words')
if kwargs:
raise TypeError(f'Unrecognized keyword arguments: {str(kwargs)}.')
origin_folder = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
path = get_file(
path,
Reported by Pylint.
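The imdb.py excerpts cover the load_data signature, the legacy nb_words alias, and the seeded shuffling of the training split. A call-site sketch using the public tf.keras.datasets.imdb.load_data API with the defaults shown in the excerpt (downloads imdb.npz on first use):

import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.imdb.load_data(
    path='imdb.npz',
    num_words=10000,  # keep only the most frequent words; None keeps all
    skip_top=0,
    maxlen=None,
    seed=113)         # the seed drives the shuffling shown in the excerpt
print(len(x_train), len(x_test))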
keras/distribute/custom_training_loop_metrics_test.py
58 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for custom training loops."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import test_util
from keras import metrics
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import test_util
from keras import metrics
from keras.distribute import strategy_combinations
Reported by Pylint.
Line: 21
Column: 1
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import test_util
from keras import metrics
from keras.distribute import strategy_combinations
class KerasMetricsTest(tf.test.TestCase, parameterized.TestCase):
Reported by Pylint.
Line: 26
Column: 1
from keras.distribute import strategy_combinations
class KerasMetricsTest(tf.test.TestCase, parameterized.TestCase):
@tf.__internal__.distribute.combinations.generate(
tf.__internal__.test.combinations.combine(
distribution=strategy_combinations.all_strategies +
strategy_combinations.multiworker_strategies,
Reported by Pylint.
Line: 28
Column: 1
class KerasMetricsTest(tf.test.TestCase, parameterized.TestCase):
@tf.__internal__.distribute.combinations.generate(
tf.__internal__.test.combinations.combine(
distribution=strategy_combinations.all_strategies +
strategy_combinations.multiworker_strategies,
mode=["eager"]
))
Reported by Pylint.
Line: 33
Column: 3
distribution=strategy_combinations.all_strategies +
strategy_combinations.multiworker_strategies,
mode=["eager"]
))
def test_multiple_keras_metrics_experimental_run(self, distribution):
with distribution.scope():
loss_metric = metrics.Mean("loss", dtype=np.float32)
loss_metric_2 = metrics.Mean("loss_2", dtype=np.float32)
Reported by Pylint.
Line: 34
Column: 1
strategy_combinations.multiworker_strategies,
mode=["eager"]
))
def test_multiple_keras_metrics_experimental_run(self, distribution):
with distribution.scope():
loss_metric = metrics.Mean("loss", dtype=np.float32)
loss_metric_2 = metrics.Mean("loss_2", dtype=np.float32)
@tf.function
Reported by Pylint.
Line: 35
Column: 1
mode=["eager"]
))
def test_multiple_keras_metrics_experimental_run(self, distribution):
with distribution.scope():
loss_metric = metrics.Mean("loss", dtype=np.float32)
loss_metric_2 = metrics.Mean("loss_2", dtype=np.float32)
@tf.function
def train_step():
Reported by Pylint.
Line: 36
Column: 1
))
def test_multiple_keras_metrics_experimental_run(self, distribution):
with distribution.scope():
loss_metric = metrics.Mean("loss", dtype=np.float32)
loss_metric_2 = metrics.Mean("loss_2", dtype=np.float32)
@tf.function
def train_step():
def step_fn():
Reported by Pylint.
Line: 37
Column: 1
def test_multiple_keras_metrics_experimental_run(self, distribution):
with distribution.scope():
loss_metric = metrics.Mean("loss", dtype=np.float32)
loss_metric_2 = metrics.Mean("loss_2", dtype=np.float32)
@tf.function
def train_step():
def step_fn():
loss = tf.constant(5.0, dtype=np.float32)
Reported by Pylint.
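The metrics test excerpted above creates keras.metrics.Mean objects under a distribution scope and updates them from a tf.function train step. A single-device sketch of the same update/result cycle, without any strategy, purely to show the Mean API the test relies on:

import tensorflow as tf

loss_metric = tf.keras.metrics.Mean('loss', dtype=tf.float32)

@tf.function
def train_step():
    loss = tf.constant(5.0, dtype=tf.float32)
    loss_metric.update_state(loss)

for _ in range(10):
    train_step()
print(float(loss_metric.result()))  # 5.0: the mean of ten identical losses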
keras/utils/version_utils.py
57 issues
Line: 18
Column: 1
# pylint: disable=protected-access
"""Utilities for Keras classes with v1 and v2 versions."""
import tensorflow.compat.v2 as tf
from keras.utils.generic_utils import LazyLoader
# TODO(b/134426265): Switch back to single-quotes once the issue
# with copybara is fixed.
# pylint: disable=g-inconsistent-quotes
Reported by Pylint.
Line: 23
Column: 1
# TODO(b/134426265): Switch back to single-quotes once the issue
# with copybara is fixed.
# pylint: disable=g-inconsistent-quotes
training = LazyLoader(
"training", globals(),
"keras.engine.training")
training_v1 = LazyLoader(
"training_v1", globals(),
Reported by Pylint.
Line: 44
Column: 1
"keras.callbacks_v1")
# pylint: enable=g-inconsistent-quotes
class ModelVersionSelector:
"""Chooses between Keras v1 and v2 Model class."""
Reported by Pylint.
Line: 21
Column: 3
import tensorflow.compat.v2 as tf
from keras.utils.generic_utils import LazyLoader
# TODO(b/134426265): Switch back to single-quotes once the issue
# with copybara is fixed.
# pylint: disable=g-inconsistent-quotes
training = LazyLoader(
"training", globals(),
"keras.engine.training")
Reported by Pylint.
Line: 71
Column: 5
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
start_cls = cls
cls = swap_class(start_cls, callbacks.TensorBoard, callbacks_v1.TensorBoard,
use_v2)
if start_cls == callbacks_v1.TensorBoard and cls == callbacks.TensorBoard:
# Since the v2 class is not a subclass of the v1 class, __init__ has to
# be called manually.
return cls(*args, **kwargs)
Reported by Pylint.
Line: 47
Column: 1
# pylint: enable=g-inconsistent-quotes
class ModelVersionSelector:
"""Chooses between Keras v1 and v2 Model class."""
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
cls = swap_class(cls, training.Model, training_v1.Model, use_v2) # pylint: disable=self-cls-assignment
Reported by Pylint.
Line: 48
Column: 1
class ModelVersionSelector:
"""Chooses between Keras v1 and v2 Model class."""
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
cls = swap_class(cls, training.Model, training_v1.Model, use_v2) # pylint: disable=self-cls-assignment
return super(ModelVersionSelector, cls).__new__(cls)
Reported by Pylint.
Line: 50
Column: 1
class ModelVersionSelector:
"""Chooses between Keras v1 and v2 Model class."""
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
cls = swap_class(cls, training.Model, training_v1.Model, use_v2) # pylint: disable=self-cls-assignment
return super(ModelVersionSelector, cls).__new__(cls)
Reported by Pylint.
Line: 51
Column: 1
"""Chooses between Keras v1 and v2 Model class."""
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
cls = swap_class(cls, training.Model, training_v1.Model, use_v2) # pylint: disable=self-cls-assignment
return super(ModelVersionSelector, cls).__new__(cls)
class LayerVersionSelector:
Reported by Pylint.
Line: 52
Column: 1
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
use_v2 = should_use_v2()
cls = swap_class(cls, training.Model, training_v1.Model, use_v2) # pylint: disable=self-cls-assignment
return super(ModelVersionSelector, cls).__new__(cls)
class LayerVersionSelector:
"""Chooses between Keras v1 and v2 Layer class."""
Reported by Pylint.
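The version_utils excerpts choose between v1 and v2 classes by reassigning cls inside __new__, hence the self-cls-assignment pragmas. A self-contained sketch of that mechanism with stand-in classes; should_use_v2, ModelV1 and ModelV2 here are illustrative, not the real Keras training classes or TF flag:

def should_use_v2():
    return True  # stand-in; the real helper consults TF's v2-behaviour flag

class ModelVersionSelector:
    """Picks the v1 or v2 subclass at instantiation time, as in the excerpt."""
    def __new__(cls, *args, **kwargs):  # pylint: disable=unused-argument
        cls = ModelV2 if should_use_v2() else ModelV1  # pylint: disable=self-cls-assignment
        return super(ModelVersionSelector, cls).__new__(cls)

class ModelV1(ModelVersionSelector):
    version = 1

class ModelV2(ModelVersionSelector):
    version = 2

m = ModelVersionSelector()
print(type(m).__name__, m.version)  # ModelV2 2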