The following issues were found:
keras/mixed_precision/policy_test.py
192 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests Policies."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from keras import combinations
from keras import testing_utils
from keras.engine import base_layer_utils
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from keras import combinations
from keras import testing_utils
from keras.engine import base_layer_utils
from keras.mixed_precision import device_compatibility_check
from keras.mixed_precision import policy as mp_policy
Reported by Pylint.
Line: 26
Column: 1
from keras.mixed_precision import device_compatibility_check
from keras.mixed_precision import policy as mp_policy
from keras.optimizer_v2 import gradient_descent
from tensorflow.python.platform import tf_logging
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
class PolicyTest(tf.test.TestCase, parameterized.TestCase):
"""Tests Policies."""
Reported by Pylint.
Line: 193
Column: 5
if not tf.executing_eagerly():
self.skipTest('Run in eager mode only.')
device_compatibility_check._logged_compatibility_check = False
with tf.compat.v1.test.mock.patch.object(tf_logging, 'warning') as mock_warn:
mp_policy.Policy('mixed_float16')
if tf.config.list_physical_devices('GPU'):
mock_warn.assert_not_called()
else:
Reported by Pylint.
Line: 31
Column: 1
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
class PolicyTest(tf.test.TestCase, parameterized.TestCase):
"""Tests Policies."""
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
Reported by Pylint.
Line: 33
Column: 1
class PolicyTest(tf.test.TestCase, parameterized.TestCase):
"""Tests Policies."""
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
self.assertEqual(policy.name, dtype)
self.assertEqual(policy.compute_dtype, dtype)
Reported by Pylint.
Line: 34
Column: 3
"""Tests Policies."""
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
self.assertEqual(policy.name, dtype)
self.assertEqual(policy.compute_dtype, dtype)
self.assertEqual(policy.variable_dtype, dtype)
Reported by Pylint.
Line: 34
Column: 1
"""Tests Policies."""
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
self.assertEqual(policy.name, dtype)
self.assertEqual(policy.compute_dtype, dtype)
self.assertEqual(policy.variable_dtype, dtype)
Reported by Pylint.
Line: 35
Column: 1
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
self.assertEqual(policy.name, dtype)
self.assertEqual(policy.compute_dtype, dtype)
self.assertEqual(policy.variable_dtype, dtype)
Reported by Pylint.
Line: 36
Column: 1
@testing_utils.enable_v2_dtype_behavior
def test_dtype_attributes(self):
for dtype in 'int32', 'bool', 'float16', 'float32':
policy = mp_policy.Policy(dtype)
self.assertEqual(policy.name, dtype)
self.assertEqual(policy.compute_dtype, dtype)
self.assertEqual(policy.variable_dtype, dtype)
for dtype in 'float16', 'bfloat16':
Reported by Pylint.
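The report records locations but not the Pylint message text. The line-193 entry touches the private flag device_compatibility_check._logged_compatibility_check and a long mock.patch.object line, which usually correspond to protected-access and line-too-long. A minimal sketch of how that block is typically kept quiet, assuming those are the messages; the helper names below are hypothetical, not the test's own.

import tensorflow.compat.v2 as tf

from keras.mixed_precision import device_compatibility_check
from keras.mixed_precision import policy as mp_policy
from tensorflow.python.platform import tf_logging


def _reset_compatibility_check():
  """Clears the 'already warned' flag so each test starts fresh."""
  # The flag is private; annotate the access rather than reaching in silently.
  device_compatibility_check._logged_compatibility_check = False  # pylint: disable=protected-access


def check_mixed_float16_warning():
  """Checks that Policy('mixed_float16') warns only when no GPU is present."""
  _reset_compatibility_check()
  with tf.compat.v1.test.mock.patch.object(
      tf_logging, 'warning') as mock_warn:  # wrapped to respect line length
    mp_policy.Policy('mixed_float16')
  if tf.config.list_physical_devices('GPU'):
    mock_warn.assert_not_called()
  else:
    # The report truncates the original else-branch; asserting that the
    # warning fired is an assumption, not the test's actual code.
    mock_warn.assert_called()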
keras/layers/preprocessing/discretization_test.py
192 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras discretization preprocessing layer."""
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
Reported by Pylint.
Line: 21
Column: 1
import os
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
Reported by Pylint.
Line: 223
Column: 5
input_data = keras.Input(shape=input_shape)
output = layer(input_data)
model = keras.Model(input_data, output)
model._run_eagerly = testing_utils.should_run_eagerly()
output_data = model.predict(test_data)
self.assertAllClose(expected, output_data)
def test_multiple_adapts(self):
first_adapt = [[1], [2], [3]]
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 33
Column: 1
@keras_parameterized.run_all_keras_modes
class DiscretizationTest(keras_parameterized.TestCase,
preprocessing_test_utils.PreprocessingLayerTest):
def test_bucketize_with_explicit_buckets_integer(self):
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
Reported by Pylint.
Line: 36
Column: 3
class DiscretizationTest(keras_parameterized.TestCase,
preprocessing_test_utils.PreprocessingLayerTest):
def test_bucketize_with_explicit_buckets_integer(self):
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
expected_output_shape = [None, 4]
Reported by Pylint.
Line: 36
Column: 1
class DiscretizationTest(keras_parameterized.TestCase,
preprocessing_test_utils.PreprocessingLayerTest):
def test_bucketize_with_explicit_buckets_integer(self):
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
expected_output_shape = [None, 4]
Reported by Pylint.
Line: 37
Column: 1
preprocessing_test_utils.PreprocessingLayerTest):
def test_bucketize_with_explicit_buckets_integer(self):
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
expected_output_shape = [None, 4]
input_data = keras.Input(shape=(4,))
Reported by Pylint.
Line: 39
Column: 1
def test_bucketize_with_explicit_buckets_integer(self):
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
expected_output_shape = [None, 4]
input_data = keras.Input(shape=(4,))
layer = discretization.Discretization(bin_boundaries=[0., 1., 2.])
bucket_data = layer(input_data)
Reported by Pylint.
Line: 40
Column: 1
input_array = np.array([[-1.5, 1.0, 3.4, .5], [0.0, 3.0, 1.3, 0.0]])
expected_output = [[0, 2, 3, 1], [1, 3, 2, 1]]
expected_output_shape = [None, 4]
input_data = keras.Input(shape=(4,))
layer = discretization.Discretization(bin_boundaries=[0., 1., 2.])
bucket_data = layer(input_data)
self.assertAllEqual(expected_output_shape, bucket_data.shape.as_list())
Reported by Pylint.
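Here the quoted import block places the standard-library import os after the third-party tensorflow import, the textbook trigger for wrong-import-order; the report again omits the message, so that is an inference. A sketch of the conventional grouping (standard library, third party, first party), using only the modules the excerpts already reference:

import os

import numpy as np
import tensorflow.compat.v2 as tf
from absl.testing import parameterized

import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.layers.preprocessing import discretization
from keras.layers.preprocessing import preprocessing_test_utils

If the line-223 entry is protected-access on model._run_eagerly, tests normally annotate that single assignment with a trailing # pylint: disable=protected-access rather than rewrite it.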
keras/tests/custom_training_loop_test.py
190 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for custom training loops."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
Reported by Pylint.
Line: 22
Column: 1
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
class LayerWithLosses(keras.layers.Layer):
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
class LayerWithLosses(keras.layers.Layer):
Reported by Pylint.
Line: 24
Column: 1
import keras
from keras import keras_parameterized
from keras import testing_utils
class LayerWithLosses(keras.layers.Layer):
def build(self, input_shape):
Reported by Pylint.
Line: 29
Column: 19
class LayerWithLosses(keras.layers.Layer):
def build(self, input_shape):
self.v = self.add_weight(
name='hey',
shape=(),
initializer='ones',
regularizer=keras.regularizers.l1(100))
Reported by Pylint.
Line: 30
Column: 5
class LayerWithLosses(keras.layers.Layer):
def build(self, input_shape):
self.v = self.add_weight(
name='hey',
shape=(),
initializer='ones',
regularizer=keras.regularizers.l1(100))
Reported by Pylint.
Line: 43
Column: 19
class LayerWithMetrics(keras.layers.Layer):
def build(self, input_shape):
self.mean = keras.metrics.Mean(name='mean_object')
def call(self, inputs):
self.add_metric(
tf.reduce_mean(inputs), name='mean_tensor', aggregation='mean')
Reported by Pylint.
Line: 44
Column: 5
class LayerWithMetrics(keras.layers.Layer):
def build(self, input_shape):
self.mean = keras.metrics.Mean(name='mean_object')
def call(self, inputs):
self.add_metric(
tf.reduce_mean(inputs), name='mean_tensor', aggregation='mean')
self.add_metric(self.mean(inputs))
Reported by Pylint.
Line: 56
Column: 5
class LayerWithTrainingArg(keras.layers.Layer):
def call(self, inputs, training=None):
self.training = training
if training:
return inputs
else:
return 0. * inputs
Reported by Pylint.
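The column-19 findings on the build signatures and the column-5 findings on self.v, self.mean and self.training look like the usual unused-argument / attribute-defined-outside-init pair, although the report does not say so. A hedged sketch of the common remedy, predeclaring the attribute in __init__ and discarding the unused argument; the call body is illustrative, since the report does not show the original one:

import keras


class LayerWithLosses(keras.layers.Layer):
  """Layer that contributes a regularization loss (sketch)."""

  def __init__(self, **kwargs):
    super().__init__(**kwargs)
    self.v = None  # declared here so linters can see the attribute

  def build(self, input_shape):
    del input_shape  # unused, but part of the Layer.build signature
    self.v = self.add_weight(
        name='hey',
        shape=(),
        initializer='ones',
        regularizer=keras.regularizers.l1(100))

  def call(self, inputs):
    # Illustrative body; the report does not show the original call method.
    return self.v * inputs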
keras/legacy_tf_layers/variable_scope_shim.py
190 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=g-classes-have-attributes
"""Contains a shim to allow using TF1 get_variable code in TF2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
Reported by Pylint.
Line: 21
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
import functools
from keras.engine import base_layer
from keras.utils import tf_inspect
from keras.utils import layer_utils
Reported by Pylint.
Line: 28
Column: 1
from keras.utils import tf_inspect
from keras.utils import layer_utils
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export # pylint: disable=g-direct-tensorflow-import
def as_shape(shape):
Reported by Pylint.
Line: 29
Column: 1
from keras.utils import layer_utils
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export # pylint: disable=g-direct-tensorflow-import
def as_shape(shape):
"""Converts the given object to a TensorShape."""
Reported by Pylint.
Line: 30
Column: 1
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export # pylint: disable=g-direct-tensorflow-import
def as_shape(shape):
"""Converts the given object to a TensorShape."""
if isinstance(shape, tf.TensorShape):
Reported by Pylint.
Line: 30
Column: 1
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export # pylint: disable=g-direct-tensorflow-import
def as_shape(shape):
"""Converts the given object to a TensorShape."""
if isinstance(shape, tf.TensorShape):
Reported by Pylint.
Line: 457
Column: 1
return found_var
# The code below handles only the case of creating a new variable.
if reuse is True: # pylint: disable=g-bool-id-comparison
raise ValueError("Variable %s does not exist, or was not created with "
"tf.get_variable(). Did you mean to set "
"reuse=tf.AUTO_REUSE in VarScope?" % name)
# Create the tensor to initialize the variable with default value.
Reported by Pylint.
Line: 112
Column: 9
try:
aggregation = tf.VariableAggregation(aggregation)
except ValueError:
raise ValueError(
"Invalid variable aggregation mode: {} for variable: {}".format(
aggregation, name))
if synchronization is None:
synchronization = tf.VariableSynchronization.AUTO
else:
Reported by Pylint.
Line: 121
Column: 7
try:
synchronization = tf.VariableSynchronization(synchronization)
except ValueError:
raise ValueError(
"Invalid variable synchronization mode: {} for variable: {}".format(
synchronization, name))
if trainable is None:
trainable = synchronization != tf.VariableSynchronization.ON_READ
return synchronization, aggregation, trainable
Reported by Pylint.
Line: 23
Column: 1
import tensorflow.compat.v2 as tf
import functools
from keras.engine import base_layer
from keras.utils import tf_inspect
from keras.utils import layer_utils
from tensorflow.python.ops import variable_scope as vs
Reported by Pylint.
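The entries at lines 112 and 121 both re-raise a ValueError from inside an except ValueError: handler, the pattern Pylint reports as raise-missing-from; since the message text is missing, that remains an inference. A minimal sketch of the chained form, wrapped in a hypothetical helper so it stands on its own:

import tensorflow.compat.v2 as tf


def _validate_synchronization(synchronization, name):
  """Coerces `synchronization` to a tf.VariableSynchronization value."""
  if synchronization is None:
    return tf.VariableSynchronization.AUTO
  try:
    return tf.VariableSynchronization(synchronization)
  except ValueError as exc:
    # Chaining with `from exc` keeps the original traceback and satisfies
    # raise-missing-from.
    raise ValueError(
        "Invalid variable synchronization mode: {} for variable: {}".format(
            synchronization, name)) from exc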
keras/layers/gru_test.py
190 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for GRU layer."""
import tensorflow.compat.v2 as tf
import copy
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import copy
from absl.testing import parameterized
import numpy as np
import keras
from keras import combinations
from keras import keras_parameterized
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import copy
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 32
Column: 1
@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
Reported by Pylint.
Line: 34
Column: 1
@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
Reported by Pylint.
Line: 34
Column: 3
@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
Reported by Pylint.
Line: 34
Column: 3
@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
Reported by Pylint.
Line: 34
Column: 3
@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
Reported by Pylint.
Line: 35
Column: 1
class GRULayerTest(keras_parameterized.TestCase):
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
keras.layers.GRU,
Reported by Pylint.
Line: 36
Column: 1
def test_return_sequences_GRU(self):
num_samples = 2
timesteps = 3
embedding_dim = 4
units = 2
testing_utils.layer_test(
keras.layers.GRU,
kwargs={'units': units,
Reported by Pylint.
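For gru_test.py the repeated findings at line 34 sit on test_return_sequences_GRU, whose capitalized suffix and missing docstring are what Pylint's invalid-name and missing-function-docstring checks usually object to; that reading is an assumption. A sketch of the renamed, documented test; the kwargs beyond what the excerpt shows are assumed:

import keras
from keras import keras_parameterized
from keras import testing_utils


@keras_parameterized.run_all_keras_modes
class GRULayerTest(keras_parameterized.TestCase):
  """Tests for keras.layers.GRU."""

  def test_return_sequences_gru(self):
    """Checks that return_sequences=True yields a per-timestep output."""
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
        keras.layers.GRU,
        # 'return_sequences' is assumed here; the report truncates the kwargs.
        kwargs={'units': units, 'return_sequences': True},
        input_shape=(num_samples, timesteps, embedding_dim))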
keras/layers/multi_head_attention_test.py
189 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for the attention layer."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
Reported by Pylint.
Line: 136
Column: 28
test_layer = multi_head_attention.MultiHeadAttention(
num_heads=12,
key_dim=64,
kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.02))
# Create a 3-dimensional input (the first dimension is implicit).
query = keras.Input(shape=(40, 80))
output = test_layer(query, query)
self.assertEqual(output.shape.as_list(), [None, 40, 80])
Reported by Pylint.
Line: 125
Column: 22
self.assertNotAllClose(masked_output_data, unmasked_output_data)
if use_bias:
self.assertLen(test_layer._query_dense.trainable_variables, 2)
self.assertLen(test_layer._output_dense.trainable_variables, 2)
else:
self.assertLen(test_layer._query_dense.trainable_variables, 1)
self.assertLen(test_layer._output_dense.trainable_variables, 1)
Reported by Pylint.
Line: 126
Column: 22
if use_bias:
self.assertLen(test_layer._query_dense.trainable_variables, 2)
self.assertLen(test_layer._output_dense.trainable_variables, 2)
else:
self.assertLen(test_layer._query_dense.trainable_variables, 1)
self.assertLen(test_layer._output_dense.trainable_variables, 1)
def test_initializer(self):
Reported by Pylint.
Line: 128
Column: 22
self.assertLen(test_layer._query_dense.trainable_variables, 2)
self.assertLen(test_layer._output_dense.trainable_variables, 2)
else:
self.assertLen(test_layer._query_dense.trainable_variables, 1)
self.assertLen(test_layer._output_dense.trainable_variables, 1)
def test_initializer(self):
"""Test with a specified initializer."""
test_layer = multi_head_attention.MultiHeadAttention(
Reported by Pylint.
Line: 129
Column: 22
self.assertLen(test_layer._output_dense.trainable_variables, 2)
else:
self.assertLen(test_layer._query_dense.trainable_variables, 1)
self.assertLen(test_layer._output_dense.trainable_variables, 1)
def test_initializer(self):
"""Test with a specified initializer."""
test_layer = multi_head_attention.MultiHeadAttention(
num_heads=12,
Reported by Pylint.
Line: 244
Column: 3
class SubclassAttention(multi_head_attention.MultiHeadAttention):
def _build_attention(self, qkv_rank):
pass
def _compute_attention(self,
query_tensor,
key_tensor,
Reported by Pylint.
Line: 247
Column: 3
def _build_attention(self, qkv_rank):
pass
def _compute_attention(self,
query_tensor,
key_tensor,
value_tensor,
attention_mask=None,
training=None):
Reported by Pylint.
Line: 248
Column: 26
pass
def _compute_attention(self,
query_tensor,
key_tensor,
value_tensor,
attention_mask=None,
training=None):
return value_tensor, None
Reported by Pylint.
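The column-22 findings all sit on test_layer._query_dense / test_layer._output_dense, so protected-access is the likely complaint; tests that deliberately inspect internals usually add a trailing # pylint: disable=protected-access instead of restructuring the layer. The SubclassAttention entries around lines 244-248 look like unused-argument; a sketch of the usual fix under that assumption:

from keras.layers import multi_head_attention


class SubclassAttention(multi_head_attention.MultiHeadAttention):
  """Subclass that bypasses attention, used to exercise the override hooks."""

  def _build_attention(self, qkv_rank):
    del qkv_rank  # unused: this subclass creates no attention weights

  def _compute_attention(self,
                         query_tensor,
                         key_tensor,
                         value_tensor,
                         attention_mask=None,
                         training=None):
    del query_tensor, key_tensor, attention_mask, training  # unused
    return value_tensor, None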
keras/integration_test/multi_worker_tutorial_test.py
187 issues
Line: 23
Column: 1
import unittest
import uuid
import zipfile
from absl import logging
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
PER_WORKER_BATCH_SIZE = 64
Reported by Pylint.
Line: 24
Column: 1
import uuid
import zipfile
from absl import logging
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
PER_WORKER_BATCH_SIZE = 64
NUM_WORKERS = 2
Reported by Pylint.
Line: 26
Column: 1
from absl import logging
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
PER_WORKER_BATCH_SIZE = 64
NUM_WORKERS = 2
NUM_EPOCHS = 2
NUM_STEPS_PER_EPOCH = 50
Reported by Pylint.
Line: 253
Column: 1
multi_worker_model = self.build_cnn_model()
multi_worker_dataset = strategy.distribute_datasets_from_function(
lambda input_context: self.dataset_fn(global_batch_size, # pylint: disable=g-long-lambda
input_context))
optimizer = tf.keras.optimizers.RMSprop(learning_rate=0.001)
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(
name='train_accuracy')
Reported by Pylint.
Line: 66
Column: 3
Please see below test method docs for what actual tutorial is being covered.
"""
# TODO(rchao): Add a test to demonstrate gather with MWMS.
@contextlib.contextmanager
def skip_fetch_failure_exception(self):
try:
yield
Reported by Pylint.
Line: 133
Column: 34
@tf.__internal__.test.combinations.generate(
tf.__internal__.test.combinations.combine(
mode=['eager'], tf_api_version=2))
def testMwmsWithModelFit(self, mode):
"""Test multi-worker training flow demo'ed in go/multi-worker-with-keras.
This test should be kept in sync with the code samples in
go/multi-worker-with-keras.
Reported by Pylint.
Line: 241
Column: 29
@tf.__internal__.test.combinations.generate(
tf.__internal__.test.combinations.combine(
mode=['eager'], tf_api_version=2))
def testMwmsWithCtl(self, mode):
"""Test multi-worker CTL training flow demo'ed in a to-be-added tutorial."""
def proc_func(checkpoint_dir):
global_batch_size = PER_WORKER_BATCH_SIZE * NUM_WORKERS
strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy()
Reported by Pylint.
Line: 42
Column: 1
# 2) Only `worker` task type is used; in this case, worker 0 is
# regarded as the chief. The implementation demonstrated here
# is for this case.
return task_type == 'worker' and task_id == 0
def _get_temp_dir(dirpath, task_id):
base_dirpath = 'workertemp_' + str(task_id)
temp_dir = os.path.join(dirpath, base_dirpath)
Reported by Pylint.
Line: 46
Column: 1
def _get_temp_dir(dirpath, task_id):
base_dirpath = 'workertemp_' + str(task_id)
temp_dir = os.path.join(dirpath, base_dirpath)
tf.io.gfile.makedirs(temp_dir)
return temp_dir
Reported by Pylint.
Line: 47
Column: 1
def _get_temp_dir(dirpath, task_id):
base_dirpath = 'workertemp_' + str(task_id)
temp_dir = os.path.join(dirpath, base_dirpath)
tf.io.gfile.makedirs(temp_dir)
return temp_dir
def write_filepath(filepath, task_type, task_id):
Reported by Pylint.
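The line-253 excerpt already carries a # pylint: disable=g-long-lambda, so the exact finding there is unclear; if the multi-line lambda itself is the problem, one common alternative is to bind the batch size with functools.partial. A sketch, reusing the names from the quoted code:

import functools


def make_multi_worker_dataset(strategy, dataset_fn, global_batch_size):
  """Builds the per-worker dataset without an inline multi-line lambda."""
  # functools.partial(dataset_fn, global_batch_size) is a callable that still
  # receives `input_context` as its remaining positional argument.
  return strategy.distribute_datasets_from_function(
      functools.partial(dataset_fn, global_batch_size))

The unused mode arguments flagged at lines 133 and 241 are normally handled with del mode at the top of the test body, if unused-argument is indeed the message.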
keras/initializers/initializers_test.py
185 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras initializers."""
import tensorflow.compat.v2 as tf
import numpy as np
from keras import backend
from keras import combinations
Reported by Pylint.
Line: 75
Column: 11
tensor_shape = (9, 6, 7)
with self.cached_session():
self._runner(
initializers.RandomUniformV2(minval=-1, maxval=1, seed=124),
tensor_shape,
target_mean=0.,
target_max=1,
target_min=-1)
Reported by Pylint.
Line: 85
Column: 11
tensor_shape = (8, 12, 99)
with self.cached_session():
self._runner(
initializers.RandomNormalV2(mean=0, stddev=1, seed=153),
tensor_shape,
target_mean=0.,
target_std=1)
def test_truncated_normal(self):
Reported by Pylint.
Line: 94
Column: 11
tensor_shape = (12, 99, 7)
with self.cached_session():
self._runner(
initializers.TruncatedNormalV2(mean=0, stddev=1, seed=126),
tensor_shape,
target_mean=0.,
target_max=2,
target_min=-2)
Reported by Pylint.
Line: 104
Column: 11
tensor_shape = (5, 6, 4)
with self.cached_session():
self._runner(
initializers.ConstantV2(2.),
tensor_shape,
target_mean=2,
target_max=2,
target_min=2)
Reported by Pylint.
Line: 116
Column: 11
fan_in, _ = _compute_fans(tensor_shape)
std = np.sqrt(1. / fan_in)
self._runner(
initializers.LecunUniformV2(seed=123),
tensor_shape,
target_mean=0.,
target_std=std)
def test_glorot_uniform(self):
Reported by Pylint.
Line: 127
Column: 11
fan_in, fan_out = _compute_fans(tensor_shape)
std = np.sqrt(2. / (fan_in + fan_out))
self._runner(
initializers.GlorotUniformV2(seed=123),
tensor_shape,
target_mean=0.,
target_std=std)
def test_he_uniform(self):
Reported by Pylint.
Line: 138
Column: 11
fan_in, _ = _compute_fans(tensor_shape)
std = np.sqrt(2. / fan_in)
self._runner(
initializers.HeUniformV2(seed=123),
tensor_shape,
target_mean=0.,
target_std=std)
def test_lecun_normal(self):
Reported by Pylint.
Line: 149
Column: 11
fan_in, _ = _compute_fans(tensor_shape)
std = np.sqrt(1. / fan_in)
self._runner(
initializers.LecunNormalV2(seed=123),
tensor_shape,
target_mean=0.,
target_std=std)
def test_glorot_normal(self):
Reported by Pylint.
Line: 160
Column: 11
fan_in, fan_out = _compute_fans(tensor_shape)
std = np.sqrt(2. / (fan_in + fan_out))
self._runner(
initializers.GlorotNormalV2(seed=123),
tensor_shape,
target_mean=0.,
target_std=std)
def test_he_normal(self):
Reported by Pylint.
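The quoted assertions in initializers_test.py encode the variance-scaling relationships the tests check: target_std is sqrt(1/fan_in) for LeCun, sqrt(2/(fan_in+fan_out)) for Glorot and sqrt(2/fan_in) for He. A small worked sketch of those numbers, using a simplified reimplementation of _compute_fans that follows Keras' convention (the helper and the example shape are illustrative, not the test's imports):

import numpy as np


def _compute_fans(shape):
  """Fan-in/fan-out for a weight shape, following Keras' convention (sketch)."""
  if len(shape) == 1:
    return shape[0], shape[0]
  receptive_field_size = 1
  for dim in shape[:-2]:
    receptive_field_size *= dim
  return shape[-2] * receptive_field_size, shape[-1] * receptive_field_size


# The three target_std formulas from the quoted tests, for an example
# weight shape of (5, 6, 4):
fan_in, fan_out = _compute_fans((5, 6, 4))
lecun_std = np.sqrt(1. / fan_in)               # LeCun uniform/normal
glorot_std = np.sqrt(2. / (fan_in + fan_out))  # Glorot uniform/normal
he_std = np.sqrt(2. / fan_in)                  # He uniform/normal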
keras/integration_test/forwardprop_test.py
183 issues
Line: 18
Column: 1
import functools
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
def _jvp(f, primals, tangents):
Reported by Pylint.
Line: 20
Column: 1
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
def _jvp(f, primals, tangents):
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 23
Column: 1
import tensorflow as tf
def _jvp(f, primals, tangents):
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
primals_out = f(*primals)
return primals_out, acc.jvp(
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
Reported by Pylint.
Line: 24
Column: 1
def _jvp(f, primals, tangents):
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
primals_out = f(*primals)
return primals_out, acc.jvp(
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
Reported by Pylint.
Line: 25
Column: 1
def _jvp(f, primals, tangents):
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
primals_out = f(*primals)
return primals_out, acc.jvp(
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
Reported by Pylint.
Line: 26
Column: 1
def _jvp(f, primals, tangents):
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
primals_out = f(*primals)
return primals_out, acc.jvp(
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
def _jacfwd(f, primals):
Reported by Pylint.
Line: 27
Column: 1
"""Compute the jacobian of `f` at `primals` multiplied by `tangents`."""
with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
primals_out = f(*primals)
return primals_out, acc.jvp(
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
def _jacfwd(f, primals):
"""Compute the jacobian of `f` at `primals` using forward-mode autodiff."""
Reported by Pylint.
Line: 31
Column: 1
primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)
def _jacfwd(f, primals):
"""Compute the jacobian of `f` at `primals` using forward-mode autodiff."""
jac_flat = []
flat_primals = tf.nest.flatten(primals)
tangent_mask = [tf.zeros_like(primal) for primal in flat_primals]
for primal_index, primal in enumerate(flat_primals):
Reported by Pylint.
Line: 32
Column: 1
def _jacfwd(f, primals):
"""Compute the jacobian of `f` at `primals` using forward-mode autodiff."""
jac_flat = []
flat_primals = tf.nest.flatten(primals)
tangent_mask = [tf.zeros_like(primal) for primal in flat_primals]
for primal_index, primal in enumerate(flat_primals):
primal_vector = tf.reshape(primal, [-1])
Reported by Pylint.
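_jvp is one of the few excerpts here that documents an algorithm rather than test plumbing: it computes a Jacobian-vector product with forward-mode autodiff via tf.autodiff.ForwardAccumulator. Restated below with its indentation and a one-line usage example; the example values are mine, not from the test:

import tensorflow as tf


def _jvp(f, primals, tangents):
  """Computes the Jacobian of `f` at `primals` multiplied by `tangents`."""
  with tf.autodiff.ForwardAccumulator(primals, tangents) as acc:
    primals_out = f(*primals)
  return primals_out, acc.jvp(
      primals_out, unconnected_gradients=tf.UnconnectedGradients.ZERO)


# Example (not from the test): d(x**2)/dx at x=3 along tangent 1.0 is 6.0.
value, jvp = _jvp(tf.square, [tf.constant(3.0)], [tf.constant(1.0)])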
keras/integration_test/function_test.py
182 issues
Line: 18
Column: 1
import sys
import tensorflow as tf
class MiniModel(tf.keras.Model):
"""Minimal model for mnist.
Reported by Pylint.
Line: 127
Column: 25
def testDecoratedMethodGetConcreteFunction(self):
m = DefunnedMiniModel()
instance_call_one = m.call.get_concrete_function(
tf.ones([1, 2]), training=False)
instance_call_two = m.call.get_concrete_function(
inputs=tf.ones([1, 2]), training=False)
self.assertAllEqual(instance_call_one(tf.ones([1, 2])),
instance_call_two(tf.ones([1, 2])))
Reported by Pylint.
Line: 129
Column: 25
m = DefunnedMiniModel()
instance_call_one = m.call.get_concrete_function(
tf.ones([1, 2]), training=False)
instance_call_two = m.call.get_concrete_function(
inputs=tf.ones([1, 2]), training=False)
self.assertAllEqual(instance_call_one(tf.ones([1, 2])),
instance_call_two(tf.ones([1, 2])))
# Also make sure get_concrete_function works on the class method
Reported by Pylint.
Line: 135
Column: 5
instance_call_two(tf.ones([1, 2])))
# Also make sure get_concrete_function works on the class method
DefunnedMiniModel.call.get_concrete_function(
m, tf.ones([1, 2]), training=False)
DefunnedMiniModel.call.get_concrete_function(
m, inputs=tf.ones([1, 2]), training=True)
def testDecoratedMethodVariableCleanup(self):
Reported by Pylint.
Line: 137
Column: 5
# Also make sure get_concrete_function works on the class method
DefunnedMiniModel.call.get_concrete_function(
m, tf.ones([1, 2]), training=False)
DefunnedMiniModel.call.get_concrete_function(
m, inputs=tf.ones([1, 2]), training=True)
def testDecoratedMethodVariableCleanup(self):
m = DefunnedMiniModel()
m(tf.ones([1, 2])) # pylint:disable=not-callable
Reported by Pylint.
Line: 32
Column: 26
self.fc = tf.keras.layers.Dense(1, name='fc', kernel_initializer='ones',
bias_initializer='ones')
def call(self, inputs, training=True):
return self.fc(inputs)
class DefunnedMiniModel(MiniModel):
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 21
Column: 1
import tensorflow as tf
class MiniModel(tf.keras.Model):
"""Minimal model for mnist.
Useful for testing and debugging on slow TPU simulators.
"""
Reported by Pylint.
Line: 22
Column: 1
class MiniModel(tf.keras.Model):
"""Minimal model for mnist.
Useful for testing and debugging on slow TPU simulators.
"""
def __init__(self):
Reported by Pylint.
Line: 27
Column: 1
Useful for testing and debugging on slow TPU simulators.
"""
def __init__(self):
super(MiniModel, self).__init__(name='')
self.fc = tf.keras.layers.Dense(1, name='fc', kernel_initializer='ones',
bias_initializer='ones')
def call(self, inputs, training=True):
Reported by Pylint.
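The MiniModel excerpts use the Python-2 style super(MiniModel, self).__init__(...) and never read the training argument, which typically draws super-with-arguments and unused-argument; the report does not confirm which messages apply. A sketch of the tidied class under that assumption:

import tensorflow as tf


class MiniModel(tf.keras.Model):
  """Minimal model for mnist; useful on slow TPU simulators."""

  def __init__(self):
    super().__init__(name='')  # zero-argument super() avoids super-with-arguments
    self.fc = tf.keras.layers.Dense(
        1, name='fc', kernel_initializer='ones', bias_initializer='ones')

  def call(self, inputs, training=True):
    del training  # unused here, but kept to match the Keras call signature
    return self.fc(inputs)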