The following issues were found:
keras/integration_test/preprocessing_applied_in_dataset_creator_test.py
20 issues
Line: 20
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from keras.integration_test import preprocessing_test_utils as utils
ds_combinations = tf.__internal__.distribute.combinations
multi_process_runner = tf.__internal__.distribute.multi_process_runner
test_combinations = tf.__internal__.test.combinations
Reported by Pylint.
Line: 21
Column: 1
from __future__ import print_function
import tensorflow as tf
from keras.integration_test import preprocessing_test_utils as utils
ds_combinations = tf.__internal__.distribute.combinations
multi_process_runner = tf.__internal__.distribute.multi_process_runner
test_combinations = tf.__internal__.test.combinations
Reported by Pylint.
Line: 45
Column: 1
@ds_combinations.generate(
    test_combinations.combine(strategy=STRATEGIES, mode="eager"))
class PreprocessingAppliedInDatasetCreatorTest(tf.test.TestCase):
  """Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
Reported by Pylint.
Line: 46
Column: 1
@ds_combinations.generate(
    test_combinations.combine(strategy=STRATEGIES, mode="eager"))
class PreprocessingAppliedInDatasetCreatorTest(tf.test.TestCase):
  """Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
Reported by Pylint.
Line: 48
Column: 3
class PreprocessingAppliedInDatasetCreatorTest(tf.test.TestCase):
  """Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
      training_model.compile(optimizer="sgd", loss="binary_crossentropy")
Reported by Pylint.
Line: 48
Column: 3
class PreprocessingAppliedInDatasetCreatorTest(tf.test.TestCase):
  """Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
      training_model.compile(optimizer="sgd", loss="binary_crossentropy")
Reported by Pylint.
Line: 48
Column: 1
class PreprocessingAppliedInDatasetCreatorTest(tf.test.TestCase):
  """Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
      training_model.compile(optimizer="sgd", loss="binary_crossentropy")
Reported by Pylint.
Line: 49
Column: 1
"""Demonstrate Keras preprocessing layers applied in tf.data.Dataset.map."""
def testDistributedModelFit(self, strategy):
with strategy.scope():
preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
training_model = utils.make_training_model()
training_model.compile(optimizer="sgd", loss="binary_crossentropy")
def dataset_fn(input_context):
Reported by Pylint.
Line: 50
Column: 1
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
      training_model.compile(optimizer="sgd", loss="binary_crossentropy")
    def dataset_fn(input_context):
      dataset = utils.make_dataset()
Reported by Pylint.
Line: 51
Column: 1
  def testDistributedModelFit(self, strategy):
    with strategy.scope():
      preprocessing_model = utils.make_preprocessing_model(self.get_temp_dir())
      training_model = utils.make_training_model()
      training_model.compile(optimizer="sgd", loss="binary_crossentropy")
    def dataset_fn(input_context):
      dataset = utils.make_dataset()
      dataset = dataset.shard(input_context.num_input_pipelines,
Reported by Pylint.
keras/layers/core/activation.py
20 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""Contains the Activation layer."""
# pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import
from keras import activations
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""Contains the Activation layer."""
# pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import
from keras import activations
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 20
Column: 1
from keras import activations
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.layers.Activation')
class Activation(Layer):
  """Applies an activation function to an output.
Reported by Pylint.
Line: 56
Column: 3
    self.supports_masking = True
    self.activation = activations.get(activation)
  def call(self, inputs):
    return self.activation(inputs)
  def compute_output_shape(self, input_shape):
    return input_shape
Reported by Pylint.
Line: 25
Column: 1
@keras_export('keras.layers.Activation')
class Activation(Layer):
  """Applies an activation function to an output.
  Args:
    activation: Activation function, such as `tf.nn.relu`, or string name of
      built-in activation function, such as "relu".
Reported by Pylint.
Line: 51
Column: 1
    Same shape as input.
  """
  def __init__(self, activation, **kwargs):
    super(Activation, self).__init__(**kwargs)
    self.supports_masking = True
    self.activation = activations.get(activation)
  def call(self, inputs):
Reported by Pylint.
Line: 52
Column: 1
"""
def __init__(self, activation, **kwargs):
super(Activation, self).__init__(**kwargs)
self.supports_masking = True
self.activation = activations.get(activation)
def call(self, inputs):
return self.activation(inputs)
Reported by Pylint.
Line: 52
Column: 5
"""
def __init__(self, activation, **kwargs):
super(Activation, self).__init__(**kwargs)
self.supports_masking = True
self.activation = activations.get(activation)
def call(self, inputs):
return self.activation(inputs)
Reported by Pylint.
Line: 53
Column: 1
  def __init__(self, activation, **kwargs):
    super(Activation, self).__init__(**kwargs)
    self.supports_masking = True
    self.activation = activations.get(activation)
  def call(self, inputs):
    return self.activation(inputs)
Reported by Pylint.
Line: 54
Column: 1
  def __init__(self, activation, **kwargs):
    super(Activation, self).__init__(**kwargs)
    self.supports_masking = True
    self.activation = activations.get(activation)
  def call(self, inputs):
    return self.activation(inputs)
  def compute_output_shape(self, input_shape):
Reported by Pylint.
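For context, the Activation layer quoted in these excerpts just resolves the activation via activations.get and applies it in call, so usage is a one-liner. A minimal sketch (not part of the report; uses the public tf.keras API):
import tensorflow as tf

# Activation accepts a callable such as tf.nn.relu, or a registered string name.
layer = tf.keras.layers.Activation('relu')
print(layer(tf.constant([-3.0, -1.0, 0.0, 2.0])).numpy())  # [0. 0. 0. 2.]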
keras/mixed_precision/loss_scale.py
20 issues
Line: 21
Column: 1
on keras, and files outside of keras should not depend on files inside keras.
"""
import tensorflow.compat.v2 as tf
from keras.utils import generic_utils
def serialize(loss_scale):
Reported by Pylint.
Line: 26
Column: 1
from keras.utils import generic_utils
def serialize(loss_scale):
  return generic_utils.serialize_keras_object(loss_scale)
def deserialize(config, custom_objects=None):
  loss_scale_module_objects = {
Reported by Pylint.
Line: 27
Column: 1
def serialize(loss_scale):
  return generic_utils.serialize_keras_object(loss_scale)
def deserialize(config, custom_objects=None):
  loss_scale_module_objects = {
      'FixedLossScale': tf.mixed_precision.experimental.FixedLossScale,
Reported by Pylint.
Line: 30
Column: 1
  return generic_utils.serialize_keras_object(loss_scale)
def deserialize(config, custom_objects=None):
  loss_scale_module_objects = {
      'FixedLossScale': tf.mixed_precision.experimental.FixedLossScale,
      'DynamicLossScale': tf.mixed_precision.experimental.DynamicLossScale,
  }
Reported by Pylint.
Line: 31
Column: 1
def deserialize(config, custom_objects=None):
  loss_scale_module_objects = {
      'FixedLossScale': tf.mixed_precision.experimental.FixedLossScale,
      'DynamicLossScale': tf.mixed_precision.experimental.DynamicLossScale,
  }
  return generic_utils.deserialize_keras_object(
Reported by Pylint.
Line: 36
Column: 1
      'DynamicLossScale': tf.mixed_precision.experimental.DynamicLossScale,
  }
  return generic_utils.deserialize_keras_object(
      config,
      module_objects=loss_scale_module_objects,
      custom_objects=custom_objects,
      printable_module_name='loss scale'
  )
Reported by Pylint.
Line: 45
Column: 1
def get(identifier):
  """Get a loss scale object."""
  if isinstance(identifier, dict):
    return deserialize(identifier)
  if isinstance(identifier, (int, float)):
    return tf.mixed_precision.experimental.FixedLossScale(identifier)
Reported by Pylint.
Line: 46
Column: 1
def get(identifier):
  """Get a loss scale object."""
  if isinstance(identifier, dict):
    return deserialize(identifier)
  if isinstance(identifier, (int, float)):
    return tf.mixed_precision.experimental.FixedLossScale(identifier)
  if identifier == 'dynamic':
Reported by Pylint.
Line: 47
Column: 1
def get(identifier):
  """Get a loss scale object."""
  if isinstance(identifier, dict):
    return deserialize(identifier)
  if isinstance(identifier, (int, float)):
    return tf.mixed_precision.experimental.FixedLossScale(identifier)
  if identifier == 'dynamic':
    return tf.mixed_precision.experimental.DynamicLossScale()
Reported by Pylint.
Line: 49
Column: 1
  if isinstance(identifier, dict):
    return deserialize(identifier)
  if isinstance(identifier, (int, float)):
    return tf.mixed_precision.experimental.FixedLossScale(identifier)
  if identifier == 'dynamic':
    return tf.mixed_precision.experimental.DynamicLossScale()
  if isinstance(identifier, tf.mixed_precision.experimental.LossScale):
    return identifier
Reported by Pylint.
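The get() helper excerpted above dispatches on the identifier type. A hedged sketch of the resolution rules it implements (this is an internal keras module, imported here only for illustration; the tf.mixed_precision.experimental classes it returns were later deprecated):
from keras.mixed_precision import loss_scale

fixed = loss_scale.get(128)           # int/float -> FixedLossScale(128)
dynamic = loss_scale.get('dynamic')   # 'dynamic' -> DynamicLossScale()
same = loss_scale.get(dynamic)        # an existing LossScale passes through unchanged
config = loss_scale.serialize(fixed)  # dict form, accepted back by get()/deserialize()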
keras/saving/saved_model/metric_serialization.py
19 issues
Line: 17
Column: 1
# ==============================================================================
"""Classes and functions implementing Metrics SavedModel serialization."""
from keras.saving.saved_model import constants
from keras.saving.saved_model import layer_serialization
from keras.utils import generic_utils
import tensorflow.compat.v2 as tf
Reported by Pylint.
Line: 18
Column: 1
"""Classes and functions implementing Metrics SavedModel serialization."""
from keras.saving.saved_model import constants
from keras.saving.saved_model import layer_serialization
from keras.utils import generic_utils
import tensorflow.compat.v2 as tf
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
Reported by Pylint.
Line: 19
Column: 1
from keras.saving.saved_model import constants
from keras.saving.saved_model import layer_serialization
from keras.utils import generic_utils
import tensorflow.compat.v2 as tf
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
  """Metric serialization."""
Reported by Pylint.
Line: 20
Column: 1
from keras.saving.saved_model import constants
from keras.saving.saved_model import layer_serialization
from keras.utils import generic_utils
import tensorflow.compat.v2 as tf
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
  """Metric serialization."""
Reported by Pylint.
Line: 43
Column: 3
  def _get_serialized_attributes_internal(self, unused_serialization_cache):
    return (
        dict(variables=tf.__internal__.tracking.wrap(self.obj.variables)),
        # TODO(b/135550038): save functions to enable saving custom metrics.
        {},
    )
Reported by Pylint.
Line: 23
Column: 1
import tensorflow.compat.v2 as tf
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
  """Metric serialization."""
  @property
  def object_identifier(self):
    return constants.METRIC_IDENTIFIER
Reported by Pylint.
Line: 24
Column: 1
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
  """Metric serialization."""
  @property
  def object_identifier(self):
    return constants.METRIC_IDENTIFIER
Reported by Pylint.
Line: 26
Column: 1
class MetricSavedModelSaver(layer_serialization.LayerSavedModelSaver):
  """Metric serialization."""
  @property
  def object_identifier(self):
    return constants.METRIC_IDENTIFIER
  def _python_properties_internal(self):
    metadata = dict(
Reported by Pylint.
Line: 27
Column: 3
"""Metric serialization."""
@property
def object_identifier(self):
return constants.METRIC_IDENTIFIER
def _python_properties_internal(self):
metadata = dict(
class_name=generic_utils.get_registered_name(type(self.obj)),
Reported by Pylint.
Line: 27
Column: 1
"""Metric serialization."""
@property
def object_identifier(self):
return constants.METRIC_IDENTIFIER
def _python_properties_internal(self):
metadata = dict(
class_name=generic_utils.get_registered_name(type(self.obj)),
Reported by Pylint.
keras/layers/core/activity_regularization.py
19 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""Contains the ActivityRegularization layer."""
# pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import
from keras import regularizers
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""Contains the ActivityRegularization layer."""
# pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import
from keras import regularizers
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 20
Column: 1
from keras import regularizers
from keras.engine.base_layer import Layer
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.layers.ActivityRegularization')
class ActivityRegularization(Layer):
  """Layer that applies an update to the cost function based input activity.
Reported by Pylint.
Line: 25
Column: 1
@keras_export('keras.layers.ActivityRegularization')
class ActivityRegularization(Layer):
  """Layer that applies an update to the cost function based input activity.
  Args:
    l1: L1 regularization factor (positive float).
    l2: L2 regularization factor (positive float).
Reported by Pylint.
Line: 40
Column: 1
    Same shape as input.
  """
  def __init__(self, l1=0., l2=0., **kwargs):
    super(ActivityRegularization, self).__init__(
        activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
    self.supports_masking = True
    self.l1 = l1
    self.l2 = l2
Reported by Pylint.
Line: 41
Column: 1
"""
def __init__(self, l1=0., l2=0., **kwargs):
super(ActivityRegularization, self).__init__(
activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
self.supports_masking = True
self.l1 = l1
self.l2 = l2
Reported by Pylint.
Line: 41
Column: 5
"""
def __init__(self, l1=0., l2=0., **kwargs):
super(ActivityRegularization, self).__init__(
activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
self.supports_masking = True
self.l1 = l1
self.l2 = l2
Reported by Pylint.
Line: 43
Column: 1
  def __init__(self, l1=0., l2=0., **kwargs):
    super(ActivityRegularization, self).__init__(
        activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
    self.supports_masking = True
    self.l1 = l1
    self.l2 = l2
  def compute_output_shape(self, input_shape):
    return input_shape
Reported by Pylint.
Line: 44
Column: 1
    super(ActivityRegularization, self).__init__(
        activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
    self.supports_masking = True
    self.l1 = l1
    self.l2 = l2
  def compute_output_shape(self, input_shape):
    return input_shape
Reported by Pylint.
Line: 44
Column: 5
    super(ActivityRegularization, self).__init__(
        activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
    self.supports_masking = True
    self.l1 = l1
    self.l2 = l2
  def compute_output_shape(self, input_shape):
    return input_shape
Reported by Pylint.
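As the excerpts show, ActivityRegularization forwards an L1L2 regularizer to the base layer and returns its input unchanged; the penalty is added to the model loss. A minimal usage sketch (not part of the report; uses the public tf.keras API):
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(8, activation='relu', input_shape=(4,)),
    tf.keras.layers.ActivityRegularization(l1=0.01, l2=0.01),  # output == input
    tf.keras.layers.Dense(1),
])
model.compile(optimizer='sgd', loss='mse')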
keras/layers/preprocessing/discretization_distribution_test.py
18 issues
Line: 17
Column: 1
# ==============================================================================
"""Distribution tests for keras.layers.preprocessing.discretization."""
import tensorflow.compat.v2 as tf
import numpy as np
import keras
from keras import keras_parameterized
Reported by Pylint.
Line: 33
Column: 1
        strategy=strategy_combinations.all_strategies +
        strategy_combinations.multi_worker_mirrored_strategies,
        mode=["eager", "graph"]))
class DiscretizationDistributionTest(
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  def test_distribution(self, strategy):
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
Reported by Pylint.
Line: 37
Column: 1
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  def test_distribution(self, strategy):
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
Reported by Pylint.
Line: 37
Column: 3
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  def test_distribution(self, strategy):
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
Reported by Pylint.
Line: 38
Column: 1
    preprocessing_test_utils.PreprocessingLayerTest):
  def test_distribution(self, strategy):
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
    tf.config.set_soft_device_placement(True)
Reported by Pylint.
Line: 40
Column: 1
  def test_distribution(self, strategy):
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
    tf.config.set_soft_device_placement(True)
    with strategy.scope():
Reported by Pylint.
Line: 41
Column: 1
    input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
    tf.config.set_soft_device_placement(True)
    with strategy.scope():
      input_data = keras.Input(shape=(4,))
Reported by Pylint.
Line: 43
Column: 1
    expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
    expected_output_shape = [None, 4]
    tf.config.set_soft_device_placement(True)
    with strategy.scope():
      input_data = keras.Input(shape=(4,))
      layer = discretization.Discretization(bin_boundaries=[0., 1., 2.])
      bucket_data = layer(input_data)
Reported by Pylint.
Line: 45
Column: 1
    tf.config.set_soft_device_placement(True)
    with strategy.scope():
      input_data = keras.Input(shape=(4,))
      layer = discretization.Discretization(bin_boundaries=[0., 1., 2.])
      bucket_data = layer(input_data)
      self.assertAllEqual(expected_output_shape, bucket_data.shape.as_list())
Reported by Pylint.
Line: 46
Column: 1
    tf.config.set_soft_device_placement(True)
    with strategy.scope():
      input_data = keras.Input(shape=(4,))
      layer = discretization.Discretization(bin_boundaries=[0., 1., 2.])
      bucket_data = layer(input_data)
      self.assertAllEqual(expected_output_shape, bucket_data.shape.as_list())
      model = keras.Model(inputs=input_data, outputs=bucket_data)
Reported by Pylint.
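The expected_output in this test follows directly from bin_boundaries=[0., 1., 2.]: values below 0 land in bucket 0, [0, 1) in bucket 1, [1, 2) in bucket 2, and values >= 2 in bucket 3. A standalone sketch of the same bucketing (assumes a TF release where the layer is exported as tf.keras.layers.Discretization):
import numpy as np
import tensorflow as tf

layer = tf.keras.layers.Discretization(bin_boundaries=[0., 1., 2.])
print(layer(np.array([[-1.5, 1.0, 3.4, 0.5]])).numpy())  # [[0 2 3 1]]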
keras/mixed_precision/get_layer_policy_test.py
18 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests the get_layer_policy function."""
import tensorflow.compat.v2 as tf
from keras.engine import base_layer_utils
from keras.layers import core
from keras.mixed_precision import get_layer_policy
from keras.mixed_precision import policy
Reported by Pylint.
Line: 25
Column: 1
from keras.mixed_precision import policy
class GetLayerPolicyTest(tf.test.TestCase):
  def test_get_layer_policy(self):
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
Reported by Pylint.
Line: 27
Column: 3
class GetLayerPolicyTest(tf.test.TestCase):
  def test_get_layer_policy(self):
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
Reported by Pylint.
Line: 27
Column: 1
class GetLayerPolicyTest(tf.test.TestCase):
  def test_get_layer_policy(self):
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
Reported by Pylint.
Line: 28
Column: 1
class GetLayerPolicyTest(tf.test.TestCase):
  def test_get_layer_policy(self):
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
Reported by Pylint.
Line: 29
Column: 1
  def test_get_layer_policy(self):
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
Reported by Pylint.
Line: 31
Column: 5
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
    layer = core.Dense(4, dtype='float64')
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float64')
Reported by Pylint.
Line: 31
Column: 1
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
    layer = core.Dense(4, dtype='float64')
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float64')
Reported by Pylint.
Line: 32
Column: 1
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
    layer = core.Dense(4, dtype='float64')
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float64')
Reported by Pylint.
Line: 33
Column: 1
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)
    layer = core.Dense(4, dtype='float64')
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float64')
  def test_error(self):
Reported by Pylint.
keras/saving/saved_model/create_test_saved_model.py
17 issues
Line: 7
Column: 1
different processes.
"""
from absl import app
from absl import flags
from keras import regularizers
from keras import testing_utils
import tensorflow.compat.v2 as tf
Reported by Pylint.
Line: 8
Column: 1
"""
from absl import app
from absl import flags
from keras import regularizers
from keras import testing_utils
import tensorflow.compat.v2 as tf
Reported by Pylint.
Line: 9
Column: 1
from absl import app
from absl import flags
from keras import regularizers
from keras import testing_utils
import tensorflow.compat.v2 as tf
flags.DEFINE_string('output_path', '', 'The path to write the SavedModel at.')
Reported by Pylint.
Line: 10
Column: 1
from absl import app
from absl import flags
from keras import regularizers
from keras import testing_utils
import tensorflow.compat.v2 as tf
flags.DEFINE_string('output_path', '', 'The path to write the SavedModel at.')
Reported by Pylint.
Line: 12
Column: 1
from keras import regularizers
from keras import testing_utils
import tensorflow.compat.v2 as tf
flags.DEFINE_string('output_path', '', 'The path to write the SavedModel at.')
FLAGS = flags.FLAGS
Reported by Pylint.
Line: 19
Column: 1
FLAGS = flags.FLAGS
def main(_) -> None:
  with testing_utils.model_type_scope('functional'):
    model = testing_utils.get_small_mlp(1, 4, input_dim=3)
    model.layers[-1].activity_regularizer = regularizers.get('l2')
    model.activity_regularizer = regularizers.get('l2')
    model.compile(
Reported by Pylint.
Line: 20
Column: 1
def main(_) -> None:
  with testing_utils.model_type_scope('functional'):
    model = testing_utils.get_small_mlp(1, 4, input_dim=3)
    model.layers[-1].activity_regularizer = regularizers.get('l2')
    model.activity_regularizer = regularizers.get('l2')
    model.compile(
        loss='mse',
Reported by Pylint.
Line: 21
Column: 1
def main(_) -> None:
  with testing_utils.model_type_scope('functional'):
    model = testing_utils.get_small_mlp(1, 4, input_dim=3)
    model.layers[-1].activity_regularizer = regularizers.get('l2')
    model.activity_regularizer = regularizers.get('l2')
    model.compile(
        loss='mse',
        optimizer='rmsprop')
Reported by Pylint.
Line: 22
Column: 1
def main(_) -> None:
  with testing_utils.model_type_scope('functional'):
    model = testing_utils.get_small_mlp(1, 4, input_dim=3)
    model.layers[-1].activity_regularizer = regularizers.get('l2')
    model.activity_regularizer = regularizers.get('l2')
    model.compile(
        loss='mse',
        optimizer='rmsprop')
    def callable_loss():
Reported by Pylint.
Line: 23
Column: 1
  with testing_utils.model_type_scope('functional'):
    model = testing_utils.get_small_mlp(1, 4, input_dim=3)
    model.layers[-1].activity_regularizer = regularizers.get('l2')
    model.activity_regularizer = regularizers.get('l2')
    model.compile(
        loss='mse',
        optimizer='rmsprop')
    def callable_loss():
      return tf.reduce_sum(model.weights[0])
Reported by Pylint.
keras/utils/dataset_creator.py
17 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
"""Input dataset creator for `model.fit`."""
import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 18
Column: 1
# pylint: disable=g-classes-have-attributes
"""Input dataset creator for `model.fit`."""
import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.utils.experimental.DatasetCreator', v1=[])
class DatasetCreator:
Reported by Pylint.
Line: 19
Column: 1
"""Input dataset creator for `model.fit`."""
import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.utils.experimental.DatasetCreator', v1=[])
class DatasetCreator:
"""Object that returns a `tf.data.Dataset` upon invoking.
Reported by Pylint.
Line: 23
Column: 1
@keras_export('keras.utils.experimental.DatasetCreator', v1=[])
class DatasetCreator:
  """Object that returns a `tf.data.Dataset` upon invoking.
  `tf.keras.utils.experimental.DatasetCreator` is designated as a supported type
  for `x`, or the input, in `tf.keras.Model.fit`. Pass an instance of this class
  to `fit` when using a callable (with a `input_context` argument) that returns
Reported by Pylint.
Line: 24
Column: 1
@keras_export('keras.utils.experimental.DatasetCreator', v1=[])
class DatasetCreator:
  """Object that returns a `tf.data.Dataset` upon invoking.
  `tf.keras.utils.experimental.DatasetCreator` is designated as a supported type
  for `x`, or the input, in `tf.keras.Model.fit`. Pass an instance of this class
  to `fit` when using a callable (with a `input_context` argument) that returns
  a `tf.data.Dataset`.
Reported by Pylint.
Line: 88
Column: 1
      information.
  """
  def __init__(self, dataset_fn, input_options=None):
    if not callable(dataset_fn):
      raise TypeError(
          '`dataset_fn` for `DatasetCreator` must be a `callable`. '
          f'Received: {dataset_fn}')
    if input_options and (not isinstance(input_options,
Reported by Pylint.
Line: 89
Column: 1
"""
def __init__(self, dataset_fn, input_options=None):
if not callable(dataset_fn):
raise TypeError(
'`dataset_fn` for `DatasetCreator` must be a `callable`. '
f'Received: {dataset_fn}')
if input_options and (not isinstance(input_options,
tf.distribute.InputOptions)):
Reported by Pylint.
Line: 90
Column: 1
  def __init__(self, dataset_fn, input_options=None):
    if not callable(dataset_fn):
      raise TypeError(
          '`dataset_fn` for `DatasetCreator` must be a `callable`. '
          f'Received: {dataset_fn}')
    if input_options and (not isinstance(input_options,
                                         tf.distribute.InputOptions)):
      raise TypeError(
Reported by Pylint.
Line: 93
Column: 1
      raise TypeError(
          '`dataset_fn` for `DatasetCreator` must be a `callable`. '
          f'Received: {dataset_fn}')
    if input_options and (not isinstance(input_options,
                                         tf.distribute.InputOptions)):
      raise TypeError(
          '`input_options` for `DatasetCreator` must be a '
          f'`tf.distribute.InputOptions`. Received: {input_options}')
Reported by Pylint.
Line: 95
Column: 1
          f'Received: {dataset_fn}')
    if input_options and (not isinstance(input_options,
                                         tf.distribute.InputOptions)):
      raise TypeError(
          '`input_options` for `DatasetCreator` must be a '
          f'`tf.distribute.InputOptions`. Received: {input_options}')
    self.dataset_fn = dataset_fn
    self.input_options = input_options
Reported by Pylint.
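The DatasetCreator docstring quoted above describes passing fit a callable that takes an input_context and returns a tf.data.Dataset. A minimal sketch of that usage (not part of the report; assumes a recent TF 2.x, and steps_per_epoch is required because the dataset's size is not known to fit):
import tensorflow as tf

def dataset_fn(input_context):
  # Shard and batch per worker using the supplied tf.distribute.InputContext.
  batch_size = input_context.get_per_replica_batch_size(64)
  dataset = tf.data.Dataset.from_tensors(([1.], [1.])).repeat()
  dataset = dataset.shard(input_context.num_input_pipelines,
                          input_context.input_pipeline_id)
  return dataset.batch(batch_size).prefetch(2)

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(1,))])
model.compile(optimizer='sgd', loss='mse')
model.fit(tf.keras.utils.experimental.DatasetCreator(dataset_fn),
          epochs=1, steps_per_epoch=5)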
keras/datasets/fashion_mnist.py
17 issues
Line: 23
Column: 1
import numpy as np
from keras.utils.data_utils import get_file
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.datasets.fashion_mnist.load_data')
def load_data():
  """Loads the Fashion-MNIST dataset.
Reported by Pylint.
Line: 28
Column: 1
@keras_export('keras.datasets.fashion_mnist.load_data')
def load_data():
  """Loads the Fashion-MNIST dataset.
  This is a dataset of 60,000 28x28 grayscale images of 10 fashion categories,
  along with a test set of 10,000 images. This dataset can be used as
  a drop-in replacement for MNIST.
Reported by Pylint.
Line: 80
Column: 1
    https://github.com/zalandoresearch/fashion-mnist/blob/master/LICENSE).
  """
  dirname = os.path.join('datasets', 'fashion-mnist')
  base = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
  files = [
      'train-labels-idx1-ubyte.gz', 'train-images-idx3-ubyte.gz',
      't10k-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz'
  ]
Reported by Pylint.
Line: 81
Column: 1
"""
dirname = os.path.join('datasets', 'fashion-mnist')
base = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
files = [
'train-labels-idx1-ubyte.gz', 'train-images-idx3-ubyte.gz',
't10k-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz'
]
Reported by Pylint.
Line: 82
Column: 1
"""
dirname = os.path.join('datasets', 'fashion-mnist')
base = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
files = [
'train-labels-idx1-ubyte.gz', 'train-images-idx3-ubyte.gz',
't10k-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz'
]
paths = []
Reported by Pylint.
Line: 87
Column: 1
      't10k-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz'
  ]
  paths = []
  for fname in files:
    paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
  with gzip.open(paths[0], 'rb') as lbpath:
    y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
Reported by Pylint.
Line: 88
Column: 1
  ]
  paths = []
  for fname in files:
    paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
  with gzip.open(paths[0], 'rb') as lbpath:
    y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
Reported by Pylint.
Line: 89
Column: 1
  paths = []
  for fname in files:
    paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
  with gzip.open(paths[0], 'rb') as lbpath:
    y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
  with gzip.open(paths[1], 'rb') as imgpath:
Reported by Pylint.
Line: 91
Column: 1
  for fname in files:
    paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
  with gzip.open(paths[0], 'rb') as lbpath:
    y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
  with gzip.open(paths[1], 'rb') as imgpath:
    x_train = np.frombuffer(
        imgpath.read(), np.uint8, offset=16).reshape(len(y_train), 28, 28)
Reported by Pylint.
Line: 92
Column: 1
    paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
  with gzip.open(paths[0], 'rb') as lbpath:
    y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
  with gzip.open(paths[1], 'rb') as imgpath:
    x_train = np.frombuffer(
        imgpath.read(), np.uint8, offset=16).reshape(len(y_train), 28, 28)
Reported by Pylint.
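load_data(), shown above, fetches the four .gz archives with get_file (caching them locally by default) and decodes them with gzip and np.frombuffer. Typical usage through the public tf.keras API; the shapes follow from the 60,000/10,000-image, 28x28 figures stated in the docstring:
import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
print(x_train.shape, y_train.shape)  # (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)    # (10000, 28, 28) (10000,)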