The following issues were found:
keras/distribute/tpu_strategy_test_utils.py
10 issues
Line: 17
Column: 1
# ==============================================================================
"""Utility functions for tests using TPUStrategy."""
import tensorflow.compat.v2 as tf
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_string("tpu", "", "Name of TPU to connect to.")
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_string("tpu", "", "Name of TPU to connect to.")
flags.DEFINE_string("project", None, "Name of GCP project with TPU.")
flags.DEFINE_string("zone", None, "Name of GCP zone with TPU.")
Reported by Pylint.
Line: 27
Column: 1
flags.DEFINE_string("zone", None, "Name of GCP zone with TPU.")
def get_tpu_cluster_resolver():
  resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
      tpu=FLAGS.tpu,
      zone=FLAGS.zone,
      project=FLAGS.project,
  )
Reported by Pylint.
Line: 28
Column: 1
def get_tpu_cluster_resolver():
  resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
      tpu=FLAGS.tpu,
      zone=FLAGS.zone,
      project=FLAGS.project,
  )
  return resolver
Reported by Pylint.
Line: 33
Column: 1
      zone=FLAGS.zone,
      project=FLAGS.project,
  )
  return resolver
def get_tpu_strategy():
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
Reported by Pylint.
Line: 36
Column: 1
  return resolver
def get_tpu_strategy():
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
  tf.tpu.experimental.initialize_tpu_system(resolver)
  return tf.distribute.experimental.TPUStrategy(resolver)
Reported by Pylint.
Line: 37
Column: 1
def get_tpu_strategy():
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
  tf.tpu.experimental.initialize_tpu_system(resolver)
  return tf.distribute.experimental.TPUStrategy(resolver)
Reported by Pylint.
Line: 38
Column: 1
def get_tpu_strategy():
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
  tf.tpu.experimental.initialize_tpu_system(resolver)
  return tf.distribute.experimental.TPUStrategy(resolver)
Reported by Pylint.
Line: 39
Column: 1
def get_tpu_strategy():
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
  tf.tpu.experimental.initialize_tpu_system(resolver)
  return tf.distribute.experimental.TPUStrategy(resolver)
Reported by Pylint.
Line: 40
Column: 1
  resolver = get_tpu_cluster_resolver()
  tf.config.experimental_connect_to_cluster(resolver)
  tf.tpu.experimental.initialize_tpu_system(resolver)
  return tf.distribute.experimental.TPUStrategy(resolver)
Reported by Pylint.
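For context, the two helpers above are meant to be called from TPU tests before any model is built. A minimal usage sketch (not part of the report; it assumes a reachable TPU addressed by the --tpu/--project/--zone flags defined in this module):

import tensorflow.compat.v2 as tf
from keras.distribute import tpu_strategy_test_utils

# Resolves the TPU from the flags, connects to it, initializes the TPU system,
# and returns a TPUStrategy wrapping it.
strategy = tpu_strategy_test_utils.get_tpu_strategy()
with strategy.scope():
  # Variables created inside the scope are placed and replicated for the TPU.
  model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
  model.compile(optimizer="sgd", loss="mse")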
keras/distribute/model_collection_base.py
10 issues
Line: 19
Column: 1
class ModelAndInput:
"""Base class to provide model and its corresponding inputs."""
def get_model(self):
"""Returns a compiled keras model object, together with output name.
Returns:
Reported by Pylint.
Line: 21
Column: 1
class ModelAndInput:
"""Base class to provide model and its corresponding inputs."""
def get_model(self):
"""Returns a compiled keras model object, together with output name.
Returns:
model: a keras model object
output_name: a string for the name of the output layer
Reported by Pylint.
Line: 22
Column: 1
"""Base class to provide model and its corresponding inputs."""
def get_model(self):
"""Returns a compiled keras model object, together with output name.
Returns:
model: a keras model object
output_name: a string for the name of the output layer
"""
Reported by Pylint.
Line: 28
Column: 1
      model: a keras model object
      output_name: a string for the name of the output layer
    """
    raise NotImplementedError("must be implemented in descendants")
  def get_data(self):
    """Returns data for training and predicting.
    Returns:
Reported by Pylint.
Line: 30
Column: 1
"""
raise NotImplementedError("must be implemented in descendants")
def get_data(self):
"""Returns data for training and predicting.
Returns:
x_train: data used for training
y_train: label used for training
Reported by Pylint.
Line: 31
Column: 1
    raise NotImplementedError("must be implemented in descendants")
  def get_data(self):
    """Returns data for training and predicting.
    Returns:
      x_train: data used for training
      y_train: label used for training
      x_predict: data used for predicting
Reported by Pylint.
Line: 38
Column: 1
      y_train: label used for training
      x_predict: data used for predicting
    """
    raise NotImplementedError("must be implemented in descendants")
  def get_batch_size(self):
    """Returns the batch_size used by the model."""
    raise NotImplementedError("must be implemented in descendants")
Reported by Pylint.
Line: 40
Column: 1
"""
raise NotImplementedError("must be implemented in descendants")
def get_batch_size(self):
"""Returns the batch_size used by the model."""
raise NotImplementedError("must be implemented in descendants")
Reported by Pylint.
Line: 41
Column: 1
    raise NotImplementedError("must be implemented in descendants")
  def get_batch_size(self):
    """Returns the batch_size used by the model."""
    raise NotImplementedError("must be implemented in descendants")
Reported by Pylint.
Line: 42
Column: 1
  def get_batch_size(self):
    """Returns the batch_size used by the model."""
    raise NotImplementedError("must be implemented in descendants")
Reported by Pylint.
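ModelAndInput is an abstract interface; the three methods above are expected to be overridden. A minimal, hypothetical subclass sketch for illustration (the concrete implementations used by the test suite are not shown in this report):

import numpy as np
import tensorflow.compat.v2 as tf
from keras.distribute import model_collection_base


class TinyDenseModelAndInput(model_collection_base.ModelAndInput):
  """Provides a one-layer model and matching random data."""

  def get_model(self):
    inputs = tf.keras.Input(shape=(3,), name="input")
    outputs = tf.keras.layers.Dense(1, name="dense")(inputs)
    model = tf.keras.Model(inputs, outputs)
    model.compile(optimizer="sgd", loss="mse")
    return model, "dense"  # model plus the output layer name, per the docstring

  def get_data(self):
    x_train = np.random.rand(8, 3).astype("float32")
    y_train = np.random.rand(8, 1).astype("float32")
    x_predict = np.random.rand(2, 3).astype("float32")
    return x_train, y_train, x_predict

  def get_batch_size(self):
    return 4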
keras/datasets/mnist.py
9 issues
Line: 20
Column: 1
import numpy as np
from keras.utils.data_utils import get_file
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.datasets.mnist.load_data')
def load_data(path='mnist.npz'):
"""Loads the MNIST dataset.
Reported by Pylint.
Line: 25
Column: 1
@keras_export('keras.datasets.mnist.load_data')
def load_data(path='mnist.npz'):
"""Loads the MNIST dataset.
This is a dataset of 60,000 28x28 grayscale images of the 10 digits,
along with a test set of 10,000 images.
More info can be found at the
[MNIST homepage](http://yann.lecun.com/exdb/mnist/).
Reported by Pylint.
Line: 70
Column: 1
    [Creative Commons Attribution-Share Alike 3.0 license.](
    https://creativecommons.org/licenses/by-sa/3.0/)
  """
  origin_folder = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
  path = get_file(
      path,
      origin=origin_folder + 'mnist.npz',
      file_hash=
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
Reported by Pylint.
Line: 71
Column: 1
    https://creativecommons.org/licenses/by-sa/3.0/)
  """
  origin_folder = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
  path = get_file(
      path,
      origin=origin_folder + 'mnist.npz',
      file_hash=
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
  with np.load(path, allow_pickle=True) as f: # pylint: disable=unexpected-keyword-arg
Reported by Pylint.
Line: 76
Column: 44
      origin=origin_folder + 'mnist.npz',
      file_hash=
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
  with np.load(path, allow_pickle=True) as f: # pylint: disable=unexpected-keyword-arg
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
Reported by Pylint.
Line: 76
Column: 1
      origin=origin_folder + 'mnist.npz',
      file_hash=
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
  with np.load(path, allow_pickle=True) as f: # pylint: disable=unexpected-keyword-arg
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
Reported by Pylint.
Line: 77
Column: 1
      file_hash=
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
  with np.load(path, allow_pickle=True) as f: # pylint: disable=unexpected-keyword-arg
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
Reported by Pylint.
Line: 78
Column: 1
      '731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')
  with np.load(path, allow_pickle=True) as f: # pylint: disable=unexpected-keyword-arg
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
Reported by Pylint.
Line: 80
Column: 1
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
Reported by Pylint.
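For reference, load_data is used as follows (the first call downloads mnist.npz, roughly 11 MB, into ~/.keras/datasets):

from keras.datasets import mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape)  # (60000, 28, 28), uint8 grayscale images
print(y_test.shape)   # (10000,), digit labels 0-9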
keras/distribute/optimizer_combinations.py
9 issues
Line: 17
Column: 1
# ==============================================================================
"""Strategy and optimizer combinations for combinations.combine()."""
import tensorflow.compat.v2 as tf
from keras.optimizer_v2 import adadelta as adadelta_keras_v2
from keras.optimizer_v2 import adagrad as adagrad_keras_v2
from keras.optimizer_v2 import adam as adam_keras_v2
from keras.optimizer_v2 import adamax as adamax_keras_v2
from keras.optimizer_v2 import ftrl as ftrl_keras_v2
Reported by Pylint.
Line: 40
Column: 3
rmsprop_optimizer_v1_fn = tf.__internal__.test.combinations.NamedObject(
"RmsPropV1", lambda: tf.compat.v1.train.RMSPropOptimizer(0.001))
# TODO(shiningsun): consider adding the other v1 optimizers
optimizers_v1 = [
gradient_descent_optimizer_v1_fn, adagrad_optimizer_v1_fn,
ftrl_optimizer_v1_fn, rmsprop_optimizer_v1_fn
]
Reported by Pylint.
Line: 63
Column: 3
rmsprop_optimizer_keras_v2_fn = tf.__internal__.test.combinations.NamedObject(
"RmsPropKerasV2", lambda: rmsprop_keras_v2.RMSprop(0.001))
# TODO(shiningsun): consider adding the other v2 optimizers
optimizers_v2 = [
gradient_descent_optimizer_keras_v2_fn, adagrad_optimizer_keras_v2_fn
]
optimizers_v1_and_v2 = optimizers_v1 + optimizers_v2
Reported by Pylint.
Line: 72
Column: 1
def distributions_and_v1_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
Reported by Pylint.
Line: 73
Column: 1
def distributions_and_v1_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
tf.__internal__.distribute.combinations
Reported by Pylint.
Line: 85
Column: 1
def distributions_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
Reported by Pylint.
Line: 86
Column: 1
def distributions_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
tf.__internal__.distribute.combinations
Reported by Pylint.
Line: 98
Column: 1
def distributions_and_v1_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
Reported by Pylint.
Line: 99
Column: 1
def distributions_and_v1_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return tf.__internal__.test.combinations.combine(
distribution=[
tf.__internal__.distribute.combinations.one_device_strategy,
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
tf.__internal__.distribute.combinations
Reported by Pylint.
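Each of the distributions_and_*_optimizers helpers returns the list of parameter dictionaries produced by tf.__internal__.test.combinations.combine(), one per strategy/optimizer pairing. An illustrative (not authoritative) way to inspect what the generated tests receive:

from keras.distribute import optimizer_combinations

combos = optimizer_combinations.distributions_and_v1_optimizers()
print(len(combos))        # one entry per (distribution, optimizer) combination
print(sorted(combos[0]))  # the keyword-argument names passed to each test case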
keras/preprocessing/text.py
9 issues
Line: 18
Column: 1
"""Utilities for text input preprocessing."""
# pylint: disable=invalid-name
from keras_preprocessing import text
from keras.preprocessing.text_dataset import text_dataset_from_directory # pylint: disable=unused-import
from tensorflow.python.util.tf_export import keras_export
hashing_trick = text.hashing_trick
Reported by Pylint.
Line: 21
Column: 1
from keras_preprocessing import text
from keras.preprocessing.text_dataset import text_dataset_from_directory # pylint: disable=unused-import
from tensorflow.python.util.tf_export import keras_export
hashing_trick = text.hashing_trick
Tokenizer = text.Tokenizer
Reported by Pylint.
Line: 32
Column: 1
filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n',
lower=True,
split=' '):
"""Converts a text to a sequence of words (or tokens).
This function transforms a string of text into a list of words
while ignoring `filters` which include punctuations by default.
>>> sample_text = 'This is a sample sentence.'
Reported by Pylint.
Line: 52
Column: 1
  Returns:
    A list of words (or tokens).
  """
  return text.text_to_word_sequence(
      input_text, filters=filters, lower=lower, split=split)
@keras_export('keras.preprocessing.text.one_hot')
def one_hot(input_text,
Reported by Pylint.
Line: 62
Column: 1
filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n',
lower=True,
split=' '):
r"""One-hot encodes a text into a list of word indexes of size `n`.
This function receives as input a string of text and returns a
list of encoded integers each corresponding to a word (or token)
in the given input string.
Reported by Pylint.
Line: 84
Column: 1
    List of integers in `[1, n]`. Each integer encodes a word
    (unicity non-guaranteed).
  """
  return text.one_hot(input_text, n, filters=filters, lower=lower, split=split)
# text.tokenizer_from_json is only available if keras_preprocessing >= 1.1.0
try:
  tokenizer_from_json = text.tokenizer_from_json
Reported by Pylint.
Line: 89
Column: 1
# text.tokenizer_from_json is only available if keras_preprocessing >= 1.1.0
try:
  tokenizer_from_json = text.tokenizer_from_json
  keras_export('keras.preprocessing.text.tokenizer_from_json', allow_multiple_exports=True)(
      tokenizer_from_json)
except AttributeError:
  pass
Reported by Pylint.
Line: 90
Column: 1
# text.tokenizer_from_json is only available if keras_preprocessing >= 1.1.0
try:
  tokenizer_from_json = text.tokenizer_from_json
  keras_export('keras.preprocessing.text.tokenizer_from_json', allow_multiple_exports=True)(
      tokenizer_from_json)
except AttributeError:
  pass
keras_export('keras.preprocessing.text.hashing_trick', allow_multiple_exports=True)(hashing_trick)
Reported by Pylint.
Line: 93
Column: 1
  keras_export('keras.preprocessing.text.tokenizer_from_json', allow_multiple_exports=True)(
      tokenizer_from_json)
except AttributeError:
  pass
keras_export('keras.preprocessing.text.hashing_trick', allow_multiple_exports=True)(hashing_trick)
keras_export('keras.preprocessing.text.Tokenizer', allow_multiple_exports=True)(Tokenizer)
Reported by Pylint.
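The text_to_word_sequence and one_hot wrappers above delegate to keras_preprocessing; a quick usage check mirroring the doctest sample in the docstring:

from keras.preprocessing import text

sample_text = 'This is a sample sentence.'
print(text.text_to_word_sequence(sample_text))
# ['this', 'is', 'a', 'sample', 'sentence']
print(text.one_hot(sample_text, n=50))
# five integers in [1, 50]; collisions between words are possible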
keras/layers/preprocessing/preprocessing_utils.py
8 issues
Line: 18
Column: 1
"""Utils for preprocessing layers."""
import numpy as np
import tensorflow.compat.v2 as tf
def listify_tensors(x):
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
Reported by Pylint.
Line: 21
Column: 1
import tensorflow.compat.v2 as tf
def listify_tensors(x):
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
x = x.numpy()
if isinstance(x, np.ndarray):
x = x.tolist()
Reported by Pylint.
Line: 22
Column: 1
def listify_tensors(x):
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
x = x.numpy()
if isinstance(x, np.ndarray):
x = x.tolist()
return x
Reported by Pylint.
Line: 23
Column: 1
def listify_tensors(x):
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
x = x.numpy()
if isinstance(x, np.ndarray):
x = x.tolist()
return x
Reported by Pylint.
Line: 24
Column: 1
def listify_tensors(x):
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
x = x.numpy()
if isinstance(x, np.ndarray):
x = x.tolist()
return x
Reported by Pylint.
Line: 25
Column: 1
"""Convert any tensors or numpy arrays to lists for config serialization."""
if tf.is_tensor(x):
x = x.numpy()
if isinstance(x, np.ndarray):
x = x.tolist()
return x
Reported by Pylint.
Line: 26
Column: 1
  if tf.is_tensor(x):
    x = x.numpy()
  if isinstance(x, np.ndarray):
    x = x.tolist()
  return x
Reported by Pylint.
Line: 27
Column: 1
    x = x.numpy()
  if isinstance(x, np.ndarray):
    x = x.tolist()
  return x
Reported by Pylint.
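listify_tensors is a small serialization helper: tensors and NumPy arrays become plain Python lists, anything else passes through unchanged. For example (assuming eager execution, the TF2 default):

import numpy as np
import tensorflow.compat.v2 as tf
from keras.layers.preprocessing import preprocessing_utils

print(preprocessing_utils.listify_tensors(tf.constant([1, 2, 3])))  # [1, 2, 3]
print(preprocessing_utils.listify_tensors(np.arange(3)))            # [0, 1, 2]
print(preprocessing_utils.listify_tensors('already serializable'))  # unchanged string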
keras/saving/saved_model/network_serialization.py
8 issues
Line: 17
Column: 1
# ==============================================================================
"""Classes and functions implementing to Network SavedModel serialization."""
from keras.saving.saved_model import constants
from keras.saving.saved_model import model_serialization
# FunctionalModel serialization is pretty much the same as Model serialization.
class NetworkSavedModelSaver(model_serialization.ModelSavedModelSaver):
Reported by Pylint.
Line: 18
Column: 1
"""Classes and functions implementing to Network SavedModel serialization."""
from keras.saving.saved_model import constants
from keras.saving.saved_model import model_serialization
# FunctionalModel serialization is pretty much the same as Model serialization.
class NetworkSavedModelSaver(model_serialization.ModelSavedModelSaver):
"""Network serialization."""
Reported by Pylint.
Line: 22
Column: 1
# FunctionalModel serialization is pretty much the same as Model serialization.
class NetworkSavedModelSaver(model_serialization.ModelSavedModelSaver):
"""Network serialization."""
@property
def object_identifier(self):
return constants.NETWORK_IDENTIFIER
Reported by Pylint.
Line: 23
Column: 1
# FunctionalModel serialization is pretty much the same as Model serialization.
class NetworkSavedModelSaver(model_serialization.ModelSavedModelSaver):
"""Network serialization."""
@property
def object_identifier(self):
return constants.NETWORK_IDENTIFIER
Reported by Pylint.
Line: 25
Column: 1
class NetworkSavedModelSaver(model_serialization.ModelSavedModelSaver):
"""Network serialization."""
@property
def object_identifier(self):
return constants.NETWORK_IDENTIFIER
Reported by Pylint.
Line: 26
Column: 3
"""Network serialization."""
@property
def object_identifier(self):
return constants.NETWORK_IDENTIFIER
Reported by Pylint.
Line: 26
Column: 1
"""Network serialization."""
@property
def object_identifier(self):
return constants.NETWORK_IDENTIFIER
Reported by Pylint.
Line: 27
Column: 1
  @property
  def object_identifier(self):
    return constants.NETWORK_IDENTIFIER
Reported by Pylint.
keras/type/types.py
7 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
"""Python module for Keras base types.
All the classes in this module is abstract classes that contains none or minimal
implementations. It is designed be used as base class for other concrete
classes, type checks, and python3 type hints.
Reported by Pylint.
Line: 25
Column: 3
import abc
# TODO(scottzhu): Export all the types under this module with API symbol.
class Layer(object, metaclass=abc.ABCMeta):
"""This is the class from which all layers inherit.
Reported by Pylint.
Line: 196
Column: 3
  precision is used, layers may have different computation and variable dtypes.
  See `tf.keras.mixed_precision.Policy` for details on layer dtypes.
  """
  pass
Reported by Pylint.
Line: 28
Column: 1
# TODO(scottzhu): Export all the types under this module with API symbol.
class Layer(object, metaclass=abc.ABCMeta):
"""This is the class from which all layers inherit.
A layer is a callable object that takes as input one or more tensors and
that outputs one or more tensors. It involves *computation*, defined
in the `call()` method, and a *state* (weight variables), defined
Reported by Pylint.
Line: 28
Column: 1
# TODO(scottzhu): Export all the types under this module with API symbol.
class Layer(object, metaclass=abc.ABCMeta):
"""This is the class from which all layers inherit.
A layer is a callable object that takes as input one or more tensors and
that outputs one or more tensors. It involves *computation*, defined
in the `call()` method, and a *state* (weight variables), defined
Reported by Pylint.
Line: 29
Column: 1
class Layer(object, metaclass=abc.ABCMeta):
"""This is the class from which all layers inherit.
A layer is a callable object that takes as input one or more tensors and
that outputs one or more tensors. It involves *computation*, defined
in the `call()` method, and a *state* (weight variables), defined
either in the constructor `__init__()` or in the `build()` method.
Reported by Pylint.
Line: 196
Column: 1
  precision is used, layers may have different computation and variable dtypes.
  See `tf.keras.mixed_precision.Policy` for details on layer dtypes.
  """
  pass
Reported by Pylint.
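The docstring above describes the Layer contract: state lives in __init__()/build(), computation lives in call(). A minimal concrete layer following that contract, written against the public tf.keras API rather than the abstract type in this module:

import tensorflow.compat.v2 as tf


class Scale(tf.keras.layers.Layer):
  """Multiplies its input by a single trainable scalar."""

  def build(self, input_shape):
    # State: one trainable weight, created lazily once the input shape is known.
    self.alpha = self.add_weight(name='alpha', shape=(), initializer='ones')

  def call(self, inputs):
    # Computation: scale the inputs by the learned scalar.
    return inputs * self.alpha


layer = Scale()
print(layer(tf.ones((2, 3))))  # builds the layer on first call, then applies it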
keras/tools/pip_package/setup.py
5 issues
Line: 40
Column: 1
# Add a new dep there if one is needed.
]
project_name = 'keras'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)
Reported by Pylint.
Line: 42
Column: 1
project_name = 'keras'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)
Reported by Pylint.
Line: 43
Column: 1
project_name = 'keras'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)
setuptools.setup(
Reported by Pylint.
Line: 44
Column: 1
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)
setuptools.setup(
    name=project_name,
Reported by Pylint.
Line: 45
Column: 1
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)
setuptools.setup(
    name=project_name,
    version=_VERSION.replace('-', ''),
Reported by Pylint.
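The --project_name handling above strips a custom flag (and its value) from sys.argv before setuptools parses it, so the wheel can be built under a different package name. A standalone demonstration with a hypothetical value:

import sys

sys.argv = ['setup.py', 'bdist_wheel', '--project_name', 'keras-nightly']

project_name = 'keras'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')  # the value shifts into the flag's old index...
  sys.argv.pop(project_name_idx)     # ...so popping the same index drops the value

print(project_name)  # keras-nightly
print(sys.argv)      # ['setup.py', 'bdist_wheel']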
keras/mixed_precision/get_layer_policy.py
5 issues
Line: 22
Column: 1
"""
from keras.engine import base_layer
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.mixed_precision.experimental.get_layer_policy', v1=[])
def get_layer_policy(layer):
"""Returns the dtype policy of a layer.
Reported by Pylint.
Line: 27
Column: 1
@keras_export('keras.mixed_precision.experimental.get_layer_policy', v1=[])
def get_layer_policy(layer):
"""Returns the dtype policy of a layer.
Warning: This function is deprecated. Use
`tf.keras.layers.Layer.dtype_policy` instead.
Args:
Reported by Pylint.
Line: 38
Column: 1
  Returns:
    The `tf.keras.mixed_precision.Policy` of the layer.
  """
  if not isinstance(layer, base_layer.Layer):
    raise ValueError('get_policy can only be called on a layer, but got: %s'
                     % (layer,))
  return layer.dtype_policy
Reported by Pylint.
Line: 39
Column: 1
    The `tf.keras.mixed_precision.Policy` of the layer.
  """
  if not isinstance(layer, base_layer.Layer):
    raise ValueError('get_policy can only be called on a layer, but got: %s'
                     % (layer,))
  return layer.dtype_policy
Reported by Pylint.
Line: 41
Column: 1
  if not isinstance(layer, base_layer.Layer):
    raise ValueError('get_policy can only be called on a layer, but got: %s'
                     % (layer,))
  return layer.dtype_policy
Reported by Pylint.
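As the docstring notes, the function is deprecated in favour of the layer's own dtype_policy attribute; both return the same policy object. A short usage check:

import tensorflow.compat.v2 as tf
from keras.mixed_precision import get_layer_policy

layer = tf.keras.layers.Dense(4)
print(get_layer_policy.get_layer_policy(layer))  # the layer's Policy (float32 by default)
print(layer.dtype_policy)                        # preferred spelling, same result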