The following issues were found:
keras/utils/vis_utils.py
212 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
# pylint: disable=g-import-not-at-top
"""Utilities related to model visualization."""
import tensorflow.compat.v2 as tf
import os
Reported by Pylint.
Line: 19
Column: 1
# pylint: disable=g-import-not-at-top
"""Utilities related to model visualization."""
import tensorflow.compat.v2 as tf
import os
import sys
import re
from keras.utils.io_utils import path_to_string
Reported by Pylint.
Line: 25
Column: 1
import sys
import re
from keras.utils.io_utils import path_to_string
from tensorflow.python.util.tf_export import keras_export
try:
  # pydot-ng is a fork of pydot that is better maintained.
  import pydot_ng as pydot
Reported by Pylint.
Line: 290
Column: 5
    if (layer_range) and (i <= layer_range[0] or i > layer_range[1]):
      continue
    layer_id = str(id(layer))
    for i, node in enumerate(layer._inbound_nodes):
      node_key = layer.name + '_ib-' + str(i)
      if node_key in model._network_nodes:
        for inbound_layer in tf.nest.flatten(node.inbound_layers):
          inbound_layer_id = str(id(inbound_layer))
          if not expand_nested:
Reported by Pylint.
Line: 21
Column: 1
import tensorflow.compat.v2 as tf
import os
import sys
import re
from keras.utils.io_utils import path_to_string
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 22
Column: 1
import tensorflow.compat.v2 as tf
import os
import sys
import re
from keras.utils.io_utils import path_to_string
from tensorflow.python.util.tf_export import keras_export
Reported by Pylint.
Line: 23
Column: 1
import os
import sys
import re
from keras.utils.io_utils import path_to_string
from tensorflow.python.util.tf_export import keras_export
try:
Reported by Pylint.
Line: 30
Column: 1
try:
  # pydot-ng is a fork of pydot that is better maintained.
  import pydot_ng as pydot
except ImportError:
  # pydotplus is an improved version of pydot
  try:
    import pydotplus as pydot
  except ImportError:
Reported by Pylint.
Line: 33
Column: 1
  import pydot_ng as pydot
except ImportError:
  # pydotplus is an improved version of pydot
  try:
    import pydotplus as pydot
  except ImportError:
    # Fall back on pydot if necessary.
    try:
      import pydot
Reported by Pylint.
Line: 34
Column: 1
except ImportError:
  # pydotplus is an improved version of pydot
  try:
    import pydotplus as pydot
  except ImportError:
    # Fall back on pydot if necessary.
    try:
      import pydot
    except ImportError:
Reported by Pylint.
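The report does not name the Pylint message IDs, but every flagged line above except line 290 sits in the import block, which is the pattern produced by wrong-import-order / wrong-import-position: the standard-library modules os, sys and re are imported after the third-party tensorflow.compat.v2, and the optional pydot fallback runs at import time. A minimal reordering sketch, assuming that is what Pylint is objecting to (the existing g-import-not-at-top disable would still cover the try/except fallback, and whether keras counts as first-party depends on the Pylint configuration):

"""Utilities related to model visualization."""

# Standard library first, then third-party, then first-party.
import os
import re
import sys

import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export

from keras.utils.io_utils import path_to_string

try:
  # pydot-ng is a fork of pydot that is better maintained.
  import pydot_ng as pydot
except ImportError:
  # pydotplus is an improved version of pydot
  try:
    import pydotplus as pydot
  except ImportError:
    # Fall back on pydot if necessary.
    try:
      import pydot
    except ImportError:
      pydot = None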
keras/layers/normalization/layer_normalization_test.py
211 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for normalization layers."""
import tensorflow.compat.v2 as tf
import numpy as np
import keras
from keras import combinations
Reported by Pylint.
Line: 220
Column: 47
      for dtype in 'float64', 'float32', 'float16':
        norm = layer_normalization.LayerNormalization(
            axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
            epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
            gamma_initializer=keras.initializers.constant(gamma))
        y = norm(keras.backend.cast(x, dtype))
        actual = keras.backend.eval(y)
        if dtype == 'float64':
Reported by Pylint.
Line: 221
Column: 31
        norm = layer_normalization.LayerNormalization(
            axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
            epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
            gamma_initializer=keras.initializers.constant(gamma))
        y = norm(keras.backend.cast(x, dtype))
        actual = keras.backend.eval(y)
        if dtype == 'float64':
          tol = fp64_tol
Reported by Pylint.
Line: 280
Column: 47
      for dtype in 'float64', 'float32', 'float16':
        norm = layer_normalization.LayerNormalization(
            axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
            epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
            gamma_initializer=keras.initializers.constant(gamma))
        norm.build(x.shape)
        # pylint: disable=cell-var-from-loop
        def forward_fn(x, beta, gamma):
Reported by Pylint.
Line: 281
Column: 31
        norm = layer_normalization.LayerNormalization(
            axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
            epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
            gamma_initializer=keras.initializers.constant(gamma))
        norm.build(x.shape)
        # pylint: disable=cell-var-from-loop
        def forward_fn(x, beta, gamma):
          # We must monkey-patch the attributes of `norm` with the function
Reported by Pylint.
Line: 175
Column: 22
  def testFusedAttr(self):
    layer_norm = layer_normalization.LayerNormalization(axis=[-2, -1])
    layer_norm.build(input_shape=(2, 2, 2))
    self.assertEqual(layer_norm._fused, True)
class LayerNormalizationNumericsTest(keras_parameterized.TestCase):
  """Tests LayerNormalization has correct and numerically stable outputs."""
Reported by Pylint.
Line: 29
Column: 1
def _run_layernorm_correctness_test(layer, dtype='float32'):
  model = keras.models.Sequential()
  model.add(keras.layers.Lambda(lambda x: tf.cast(x, dtype='float16')))
  norm = layer(input_shape=(2, 2, 2), dtype=dtype)
  model.add(norm)
  model.compile(
      loss='mse',
Reported by Pylint.
Line: 30
Column: 1
def _run_layernorm_correctness_test(layer, dtype='float32'):
  model = keras.models.Sequential()
  model.add(keras.layers.Lambda(lambda x: tf.cast(x, dtype='float16')))
  norm = layer(input_shape=(2, 2, 2), dtype=dtype)
  model.add(norm)
  model.compile(
      loss='mse',
      optimizer=tf.compat.v1.train.GradientDescentOptimizer(0.01),
Reported by Pylint.
Line: 31
Column: 1
def _run_layernorm_correctness_test(layer, dtype='float32'):
  model = keras.models.Sequential()
  model.add(keras.layers.Lambda(lambda x: tf.cast(x, dtype='float16')))
  norm = layer(input_shape=(2, 2, 2), dtype=dtype)
  model.add(norm)
  model.compile(
      loss='mse',
      optimizer=tf.compat.v1.train.GradientDescentOptimizer(0.01),
      run_eagerly=testing_utils.should_run_eagerly())
Reported by Pylint.
Line: 32
Column: 1
  model = keras.models.Sequential()
  model.add(keras.layers.Lambda(lambda x: tf.cast(x, dtype='float16')))
  norm = layer(input_shape=(2, 2, 2), dtype=dtype)
  model.add(norm)
  model.compile(
      loss='mse',
      optimizer=tf.compat.v1.train.GradientDescentOptimizer(0.01),
      run_eagerly=testing_utils.should_run_eagerly())
Reported by Pylint.
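Two patterns dominate the findings above: the column-1 warnings fall on lines indented with Keras's two-space style, which is consistent with bad-indentation (Pylint's default expects four spaces and is normally relaxed in the project's Pylint configuration rather than in the code), and the line-175 warning at column 22 lands on layer_norm._fused, which matches protected-access. A hedged sketch of how a test that deliberately inspects a private attribute usually acknowledges it, assuming that is the message:

  def testFusedAttr(self):
    layer_norm = layer_normalization.LayerNormalization(axis=[-2, -1])
    layer_norm.build(input_shape=(2, 2, 2))
    # Intentional peek at a private attribute; silence the checker locally.
    self.assertEqual(layer_norm._fused, True)  # pylint: disable=protected-access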
keras/layers/preprocessing/hashing_test.py
211 issues
Line: 18
Column: 1
"""Tests for hashing layer."""
import os
from absl.testing import parameterized
import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.engine import input_layer
Reported by Pylint.
Line: 27
Column: 1
from keras.engine import training
from keras.layers.preprocessing import hashing
import numpy as np
import tensorflow.compat.v2 as tf
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
Reported by Pylint.
Line: 31
Column: 1
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
Reported by Pylint.
Line: 31
Column: 1
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
Reported by Pylint.
Line: 33
Column: 3
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
Reported by Pylint.
Line: 33
Column: 1
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
Reported by Pylint.
Line: 34
Column: 1
class HashingTest(keras_parameterized.TestCase):
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
  def test_hash_dense_input_farmhash(self):
Reported by Pylint.
Line: 35
Column: 1
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
  def test_hash_dense_input_farmhash(self):
    layer = hashing.Hashing(num_bins=2)
Reported by Pylint.
Line: 36
Column: 1
  def test_hash_single_bin(self):
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
  def test_hash_dense_input_farmhash(self):
    layer = hashing.Hashing(num_bins=2)
    inp = np.asarray([['omar'], ['stringer'], ['marlo'], ['wire'],
Reported by Pylint.
Line: 37
Column: 1
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)
  def test_hash_dense_input_farmhash(self):
    layer = hashing.Hashing(num_bins=2)
    inp = np.asarray([['omar'], ['stringer'], ['marlo'], ['wire'],
                      ['skywalker']])
Reported by Pylint.
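Besides the two-space indentation warnings, the findings flagged at columns 1 and 3 on the class HashingTest and def test_hash_single_bin lines are consistent with missing-class-docstring / missing-function-docstring. A sketch of the same test with docstrings added, assuming those are the messages (the docstring wording here is illustrative only):

@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class HashingTest(keras_parameterized.TestCase):
  """Tests for the `hashing.Hashing` preprocessing layer."""

  def test_hash_single_bin(self):
    """With a single bin, every input must hash to bucket 0."""
    layer = hashing.Hashing(num_bins=1)
    inp = np.asarray([['A'], ['B'], ['C'], ['D'], ['E']])
    output = layer(inp)
    self.assertAllClose([[0], [0], [0], [0], [0]], output)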
keras/layers/cudnn_recurrent.py
210 issues
Line: 17
Column: 1
# ==============================================================================
"""Recurrent layers backed by cuDNN."""
import tensorflow.compat.v2 as tf
import collections
from keras import backend
from keras import constraints
from keras import initializers
Reported by Pylint.
Line: 27
Column: 1
from keras.engine.input_spec import InputSpec
from keras.layers import recurrent_v2
from keras.layers.recurrent import RNN
from tensorflow.python.util.tf_export import keras_export
class _CuDNNRNN(RNN):
"""Private base class for CuDNNGRU and CuDNNLSTM layers.
Reported by Pylint.
Line: 76
Column: 3
    self._num_constants = 0
    self._vector_shape = tf.constant([-1])
  def call(self, inputs, mask=None, training=None, initial_state=None):
    if isinstance(mask, list):
      mask = mask[0]
    if mask is not None:
      raise ValueError('Masking is not supported for CuDNN RNNs.')
Reported by Pylint.
Line: 130
Column: 3
    return dict(list(base_config.items()) + list(config.items()))
  @classmethod
  def from_config(cls, config):
    return cls(**config)
  @property
  def trainable_weights(self):
    if self.trainable and self.built:
Reported by Pylint.
Line: 242
Column: 5
      input_shape = input_shape[0]
    input_dim = int(input_shape[-1])
    self.kernel = self.add_weight(
        shape=(input_dim, self.units * 3),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
Reported by Pylint.
Line: 249
Column: 5
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(self.units, self.units * 3),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
Reported by Pylint.
Line: 256
Column: 5
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    self.bias = self.add_weight(
        shape=(self.units * 6,),
        name='bias',
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)
Reported by Pylint.
Line: 428
Column: 5
      input_shape = input_shape[0]
    input_dim = int(input_shape[-1])
    self.kernel = self.add_weight(
        shape=(input_dim, self.units * 4),
        name='kernel',
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
Reported by Pylint.
Line: 435
Column: 5
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=(self.units, self.units * 4),
        name='recurrent_kernel',
        initializer=self.recurrent_initializer,
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
Reported by Pylint.
Line: 452
Column: 5
        ], axis=0)
    else:
      bias_initializer = self.bias_initializer
    self.bias = self.add_weight(
        shape=(self.units * 8,),
        name='bias',
        initializer=bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint)
Reported by Pylint.
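The column-5 warnings above all land on the self.kernel, self.recurrent_kernel and self.bias assignments inside build(), which is the signature of attribute-defined-outside-init: Keras layers create their weights lazily in build() rather than in __init__. Assuming that is the message, the usual remedies are to predeclare the attributes in __init__ or to disable the check at the assignment; a minimal, self-contained sketch of the predeclaration pattern (SketchLayer is hypothetical, not part of this module):

import tensorflow.compat.v2 as tf

class SketchLayer(tf.keras.layers.Layer):
  """Toy layer: weights predeclared in __init__ and created lazily in build()."""

  def __init__(self, units, **kwargs):
    super().__init__(**kwargs)
    self.units = units
    self.kernel = None  # created in build(); predeclared to satisfy Pylint

  def build(self, input_shape):
    self.kernel = self.add_weight(
        name='kernel', shape=(int(input_shape[-1]), self.units))
    super().build(input_shape)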
keras/layers/preprocessing/normalization_test.py
209 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for keras.layers.preprocessing.normalization."""
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
Reported by Pylint.
Line: 21
Column: 1
import os
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
Reported by Pylint.
Line: 221
Column: 5
    input_data = keras.Input(shape=input_shape)
    output = layer(input_data)
    model = keras.Model(input_data, output)
    model._run_eagerly = testing_utils.should_run_eagerly()
    output_data = model.predict(test_data)
    self.assertAllClose(expected, output_data)
  def test_1d_unbatched_adapt(self):
    ds = tf.data.Dataset.from_tensor_slices([
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 33
Column: 1
def _get_layer_computation_test_cases():
  test_cases = ({
      "adapt_data": np.array([[1.], [2.], [3.], [4.], [5.]], dtype=np.float32),
      "axis": -1,
      "test_data": np.array([[1.], [2.], [3.]], np.float32),
      "expected": np.array([[-1.414214], [-.707107], [0]], np.float32),
      "testcase_name": "2d_single_element"
Reported by Pylint.
Line: 100
Column: 1
"zero_variance"
})
crossed_test_cases = []
# Cross above test cases with use_dataset in (True, False)
for use_dataset in (True, False):
for case in test_cases:
case = case.copy()
if use_dataset:
Reported by Pylint.
Line: 102
Column: 1
  crossed_test_cases = []
  # Cross above test cases with use_dataset in (True, False)
  for use_dataset in (True, False):
    for case in test_cases:
      case = case.copy()
      if use_dataset:
        case["testcase_name"] = case["testcase_name"] + "_with_dataset"
      case["use_dataset"] = use_dataset
Reported by Pylint.
Line: 103
Column: 1
  crossed_test_cases = []
  # Cross above test cases with use_dataset in (True, False)
  for use_dataset in (True, False):
    for case in test_cases:
      case = case.copy()
      if use_dataset:
        case["testcase_name"] = case["testcase_name"] + "_with_dataset"
      case["use_dataset"] = use_dataset
      crossed_test_cases.append(case)
Reported by Pylint.
Line: 104
Column: 1
  # Cross above test cases with use_dataset in (True, False)
  for use_dataset in (True, False):
    for case in test_cases:
      case = case.copy()
      if use_dataset:
        case["testcase_name"] = case["testcase_name"] + "_with_dataset"
      case["use_dataset"] = use_dataset
      crossed_test_cases.append(case)
Reported by Pylint.
Line: 105
Column: 1
  for use_dataset in (True, False):
    for case in test_cases:
      case = case.copy()
      if use_dataset:
        case["testcase_name"] = case["testcase_name"] + "_with_dataset"
      case["use_dataset"] = use_dataset
      crossed_test_cases.append(case)
  return crossed_test_cases
Reported by Pylint.
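The warning at line 221, column 5 falls on model._run_eagerly = ..., which matches protected-access; keras.Model exposes the same switch through the public, settable run_eagerly property (it is also accepted as a compile() argument). A sketch of the public-API equivalent, assuming protected-access is indeed the message:

    model = keras.Model(input_data, output)
    model.run_eagerly = testing_utils.should_run_eagerly()  # public property
    output_data = model.predict(test_data)
    self.assertAllClose(expected, output_data)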
keras/utils/conv_utils.py
209 issues
Line: 17
Column: 1
# ==============================================================================
"""Utilities used by convolution layers."""
import tensorflow.compat.v2 as tf
import itertools
import numpy as np
from keras import backend
Reported by Pylint.
Line: 78
Column: 7
    try:
      value_tuple = tuple(value)
    except TypeError:
      raise ValueError(error_msg)
    if len(value_tuple) != n:
      raise ValueError(error_msg)
    for single_value in value_tuple:
      try:
        int(single_value)
Reported by Pylint.
Line: 87
Column: 9
      except (ValueError, TypeError):
        error_msg += (f'including element {single_value} of '
                      f'type {type(single_value)}')
        raise ValueError(error_msg)
  return value_tuple
def conv_output_length(input_length, filter_size, padding, stride, dilation=1):
  """Determines output length of a convolution given input length.
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import itertools
import numpy as np
from keras import backend
Reported by Pylint.
Line: 25
Column: 1
from keras import backend
def convert_data_format(data_format, ndim):
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
Reported by Pylint.
Line: 26
Column: 1
def convert_data_format(data_format, ndim):
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
    elif ndim == 5:
Reported by Pylint.
Line: 27
Column: 5
def convert_data_format(data_format, ndim):
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
    elif ndim == 5:
      return 'NDHWC'
Reported by Pylint.
Line: 27
Column: 1
def convert_data_format(data_format, ndim):
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
    elif ndim == 5:
      return 'NDHWC'
Reported by Pylint.
Line: 28
Column: 1
def convert_data_format(data_format, ndim):
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
    elif ndim == 5:
      return 'NDHWC'
    else:
Reported by Pylint.
Line: 29
Column: 1
  if data_format == 'channels_last':
    if ndim == 3:
      return 'NWC'
    elif ndim == 4:
      return 'NHWC'
    elif ndim == 5:
      return 'NDHWC'
    else:
      raise ValueError(
Reported by Pylint.
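The warnings at lines 78 and 87 (columns 7 and 9) sit on raise ValueError(error_msg) statements inside except blocks, which is exactly where Pylint reports raise-missing-from (W0707). Assuming that is the message, the fix is explicit exception chaining; a self-contained sketch of the pattern (normalize_tuple_sketch is hypothetical, only mirroring the shape of the real helper):

def normalize_tuple_sketch(value, n, error_msg):
  """Coerces `value` to an n-tuple, chaining the original error on failure."""
  try:
    value_tuple = tuple(value)
  except TypeError as err:
    # `from err` keeps the original traceback; `from None` would suppress it.
    raise ValueError(error_msg) from err
  if len(value_tuple) != n:
    raise ValueError(error_msg)
  return value_tuple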
keras/tests/model_architectures.py
208 issues
Line: 19
Column: 1
import collections
import keras
# Declaring namedtuple()
ModelFn = collections.namedtuple('ModelFn',
                                 ['model', 'input_shape', 'target_shape'])
Reported by Pylint.
Line: 164
Column: 1
    self.bn = keras.layers.BatchNormalization()
    self.dp = keras.layers.Dropout(0.5)
  def call(self, inputs, **kwargs):
    x = self.dense1(inputs)
    x = self.dp(x)
    x = self.bn(x)
    return self.dense2(x)
Reported by Pylint.
Line: 27
Column: 1
def basic_sequential():
"""Basic sequential model."""
model = keras.Sequential([
keras.layers.Dense(3, activation='relu', input_shape=(3,)),
keras.layers.Dense(2, activation='softmax'),
])
return ModelFn(model, (None, 3), (None, 2))
Reported by Pylint.
Line: 28
Column: 1
def basic_sequential():
"""Basic sequential model."""
model = keras.Sequential([
keras.layers.Dense(3, activation='relu', input_shape=(3,)),
keras.layers.Dense(2, activation='softmax'),
])
return ModelFn(model, (None, 3), (None, 2))
Reported by Pylint.
Line: 32
Column: 1
      keras.layers.Dense(3, activation='relu', input_shape=(3,)),
      keras.layers.Dense(2, activation='softmax'),
  ])
  return ModelFn(model, (None, 3), (None, 2))
def basic_sequential_deferred():
  """Sequential model with deferred input shape."""
  model = keras.Sequential([
Reported by Pylint.
Line: 36
Column: 1
def basic_sequential_deferred():
"""Sequential model with deferred input shape."""
model = keras.Sequential([
keras.layers.Dense(3, activation='relu'),
keras.layers.Dense(2, activation='softmax'),
])
return ModelFn(model, (None, 3), (None, 2))
Reported by Pylint.
Line: 37
Column: 1
def basic_sequential_deferred():
"""Sequential model with deferred input shape."""
model = keras.Sequential([
keras.layers.Dense(3, activation='relu'),
keras.layers.Dense(2, activation='softmax'),
])
return ModelFn(model, (None, 3), (None, 2))
Reported by Pylint.
Line: 41
Column: 1
      keras.layers.Dense(3, activation='relu'),
      keras.layers.Dense(2, activation='softmax'),
  ])
  return ModelFn(model, (None, 3), (None, 2))
def stacked_rnn():
  """Stacked RNN model."""
  inputs = keras.Input((None, 3))
Reported by Pylint.
Line: 45
Column: 1
def stacked_rnn():
"""Stacked RNN model."""
inputs = keras.Input((None, 3))
layer = keras.layers.RNN([keras.layers.LSTMCell(2) for _ in range(3)])
x = layer(inputs)
outputs = keras.layers.Dense(2)(x)
model = keras.Model(inputs, outputs)
Reported by Pylint.
Line: 46
Column: 1
def stacked_rnn():
"""Stacked RNN model."""
inputs = keras.Input((None, 3))
layer = keras.layers.RNN([keras.layers.LSTMCell(2) for _ in range(3)])
x = layer(inputs)
outputs = keras.layers.Dense(2)(x)
model = keras.Model(inputs, outputs)
return ModelFn(model, (None, 4, 3), (None, 2))
Reported by Pylint.
keras/engine/training_eager_test.py
205 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for training routines."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import metrics as metrics_module
Reported by Pylint.
Line: 37
Column: 5
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
      return
    class DynamicModel(keras.Model):
      def __init__(self):
        super(DynamicModel, self).__init__(dynamic=True)
        self.dense = keras.layers.Dense(
            1, kernel_initializer='zeros', bias_initializer='ones')
Reported by Pylint.
Line: 44
Column: 7
        self.dense = keras.layers.Dense(
            1, kernel_initializer='zeros', bias_initializer='ones')
      def call(self, inputs):
        return self.dense(inputs)
    model = DynamicModel()
    model.compile(
        'rmsprop', 'mae',
Reported by Pylint.
Line: 192
Column: 3
    model.fit(dataset, steps_per_epoch=2, epochs=1, verbose=0,
              validation_data=validation_dataset)
  # TODO(b/120931266): Enable test on subclassed models after bug causing an
  # extra dimension to be added to predict outputs is fixed.
  @keras_parameterized.run_with_all_model_types(exclude_models='subclass')
  def test_generator_methods(self):
    model = testing_utils.get_small_mlp(10, 4, 3)
    optimizer = rmsprop.RMSprop(learning_rate=0.001)
Reported by Pylint.
Line: 29
Column: 1
from keras.optimizer_v2 import rmsprop
class TrainingTest(keras_parameterized.TestCase):
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_dynamic_model_has_trainable_weights(self):
    if not tf.executing_eagerly():
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
Reported by Pylint.
Line: 31
Column: 1
class TrainingTest(keras_parameterized.TestCase):
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_dynamic_model_has_trainable_weights(self):
    if not tf.executing_eagerly():
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
      return
Reported by Pylint.
Line: 32
Column: 3
class TrainingTest(keras_parameterized.TestCase):
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_dynamic_model_has_trainable_weights(self):
    if not tf.executing_eagerly():
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
      return
    class DynamicModel(keras.Model):
Reported by Pylint.
Line: 32
Column: 1
class TrainingTest(keras_parameterized.TestCase):
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_dynamic_model_has_trainable_weights(self):
    if not tf.executing_eagerly():
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
      return
    class DynamicModel(keras.Model):
Reported by Pylint.
Line: 33
Column: 1
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_dynamic_model_has_trainable_weights(self):
    if not tf.executing_eagerly():
      # Only test Eager modes, as Graph mode is not relevant for dynamic models.
      return
    class DynamicModel(keras.Model):
Reported by Pylint.
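Two of the flagged spots above are worth calling out. The column-3 warning on line 192 lands on the # TODO(b/120931266) comment, consistent with Pylint's fixme check (W0511), which is normally tuned through the checker's configuration rather than by rewording the comment. The column-5/7 warnings on the locally defined class DynamicModel and its call method match missing docstrings; a sketch of the nested model with docstrings added, assuming those are the messages (wording illustrative only):

    class DynamicModel(keras.Model):
      """Minimal dynamic (eager-only) model used by this test."""

      def __init__(self):
        super(DynamicModel, self).__init__(dynamic=True)
        self.dense = keras.layers.Dense(
            1, kernel_initializer='zeros', bias_initializer='ones')

      def call(self, inputs):
        """Applies the single dense layer."""
        return self.dense(inputs)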
keras/applications/mobilenet_v3.py
201 issues
Line: 19
Column: 1
# pylint: disable=missing-function-docstring
"""MobileNet v3 models for Keras."""
import tensorflow.compat.v2 as tf
from keras import backend
from keras import models
from keras.applications import imagenet_utils
from keras.layers import VersionAwareLayers
Reported by Pylint.
Line: 27
Column: 1
from keras.layers import VersionAwareLayers
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
# TODO(scottzhu): Change this to the GCS path.
BASE_WEIGHT_PATH = ('https://storage.googleapis.com/tensorflow/'
Reported by Pylint.
Line: 28
Column: 1
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
# TODO(scottzhu): Change this to the GCS path.
BASE_WEIGHT_PATH = ('https://storage.googleapis.com/tensorflow/'
                    'keras-applications/mobilenet_v3/')
Reported by Pylint.
Line: 31
Column: 3
from tensorflow.python.util.tf_export import keras_export
# TODO(scottzhu): Change this to the GCS path.
BASE_WEIGHT_PATH = ('https://storage.googleapis.com/tensorflow/'
                    'keras-applications/mobilenet_v3/')
WEIGHTS_HASHES = {
    'large_224_0.75_float': ('765b44a33ad4005b3ac83185abf1d0eb',
                             'e7b4d1071996dd51a2c2ca2424570e20'),
Reported by Pylint.
Line: 194
Column: 9
        is_input_t_tensor = backend.is_keras_tensor(
            layer_utils.get_source_inputs(input_tensor))
      except ValueError:
        raise ValueError('input_tensor: ', input_tensor,
                         'is not type input_tensor. '
                         f'Received type(input_tensor)={type(input_tensor)}')
    if is_input_t_tensor:
      if backend.image_data_format() == 'channels_first':
        if backend.int_shape(input_tensor)[1] != input_shape[1]:
Reported by Pylint.
Line: 223
Column: 7
    try:
      backend.is_keras_tensor(input_tensor)
    except ValueError:
      raise ValueError('input_tensor: ', input_tensor, 'is type: ',
                       type(input_tensor), 'which is not a valid type')
    if backend.is_keras_tensor(input_tensor):
      if backend.image_data_format() == 'channels_first':
        rows = backend.int_shape(input_tensor)[2]
Reported by Pylint.
Line: 158
Column: 1
"""
def MobileNetV3(stack_fn,
                last_point_ch,
                input_shape=None,
                alpha=1.0,
                model_type='large',
                minimalistic=False,
Reported by Pylint.
Line: 158
Column: 1
"""
def MobileNetV3(stack_fn,
                last_point_ch,
                input_shape=None,
                alpha=1.0,
                model_type='large',
                minimalistic=False,
Reported by Pylint.
Line: 158
Column: 1
"""
def MobileNetV3(stack_fn,
                last_point_ch,
                input_shape=None,
                alpha=1.0,
                model_type='large',
                minimalistic=False,
Reported by Pylint.
Line: 158
Column: 1
"""
def MobileNetV3(stack_fn,
                last_point_ch,
                input_shape=None,
                alpha=1.0,
                model_type='large',
                minimalistic=False,
Reported by Pylint.
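The raise-inside-except warnings at lines 194 and 223 would take the same raise ... from err treatment sketched under keras/utils/conv_utils.py above. The four identical warnings at line 158, column 1 all point at the def MobileNetV3(...) signature; the checks that commonly fire there are invalid-name (the factory is deliberately CamelCase to match the exported symbol) and the too-many-arguments/too-many-locals complexity limits. A hedged sketch of the usual local disable, assuming those are the messages (the trimmed parameter list is illustrative only):

def MobileNetV3(  # pylint: disable=invalid-name,too-many-arguments
    stack_fn,
    last_point_ch,
    input_shape=None):
  ...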
keras/applications/nasnet.py
193 issues
Line: 41
Column: 1
https://arxiv.org/abs/1707.07012) (CVPR 2018)
"""
import tensorflow.compat.v2 as tf
from keras import backend
from keras.applications import imagenet_utils
from keras.engine import training
from keras.layers import VersionAwareLayers
Reported by Pylint.
Line: 49
Column: 1
from keras.layers import VersionAwareLayers
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
BASE_WEIGHTS_PATH = ('https://storage.googleapis.com/tensorflow/'
                     'keras-applications/nasnet/')
Reported by Pylint.
Line: 50
Column: 1
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
BASE_WEIGHTS_PATH = ('https://storage.googleapis.com/tensorflow/'
                     'keras-applications/nasnet/')
NASNET_MOBILE_WEIGHT_PATH = BASE_WEIGHTS_PATH + 'NASNet-mobile.h5'
Reported by Pylint.
Line: 63
Column: 1
layers = VersionAwareLayers()
def NASNet(input_shape=None,
           penultimate_filters=4032,
           num_blocks=6,
           stem_block_filters=96,
           skip_reduction=True,
           filter_multiplier=2,
Reported by Pylint.
Line: 63
Column: 1
layers = VersionAwareLayers()
def NASNet(input_shape=None,
           penultimate_filters=4032,
           num_blocks=6,
           stem_block_filters=96,
           skip_reduction=True,
           filter_multiplier=2,
Reported by Pylint.
Line: 63
Column: 1
layers = VersionAwareLayers()
def NASNet(input_shape=None,
           penultimate_filters=4032,
           num_blocks=6,
           stem_block_filters=96,
           skip_reduction=True,
           filter_multiplier=2,
Reported by Pylint.
Line: 63
Column: 1
layers = VersionAwareLayers()
def NASNet(input_shape=None,
           penultimate_filters=4032,
           num_blocks=6,
           stem_block_filters=96,
           skip_reduction=True,
           filter_multiplier=2,
Reported by Pylint.
Line: 76
Column: 1
           classes=1000,
           default_size=None,
           classifier_activation='softmax'):
  """Instantiates a NASNet model.
  Reference:
  - [Learning Transferable Architectures for Scalable Image Recognition](
      https://arxiv.org/abs/1707.07012) (CVPR 2018)
Reported by Pylint.
Line: 152
Column: 1
  Returns:
    A `keras.Model` instance.
  """
  if not (weights in {'imagenet', None} or tf.io.gfile.exists(weights)):
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization), `imagenet` '
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')
Reported by Pylint.
Line: 153
Column: 1
    A `keras.Model` instance.
  """
  if not (weights in {'imagenet', None} or tf.io.gfile.exists(weights)):
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization), `imagenet` '
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')
if weights == 'imagenet' and include_top and classes != 1000:
Reported by Pylint.