The following issues were found:
keras/legacy_tf_layers/normalization_test.py
1045 issues
Line: 21
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
import os
import numpy as np
Reported by Pylint.
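Note: the report omits the Pylint message IDs. The excerpt above shows a standard-library import (os) following third-party imports at module top level, which is consistent with Pylint's import-ordering checks (wrong-import-order / wrong-import-position); that is an assumption, not something the report states. A minimal regrouping sketch, using only the imports visible in the excerpts for this file (standard library, then third party, then local):
# Hypothetical regrouping to satisfy the conventional import grouping.
from __future__ import division
from __future__ import print_function

import os

import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import test_util

from keras.legacy_tf_layers import convolutional as conv_layers
from keras.legacy_tf_layers import normalization as normalization_layers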
Line: 27
Column: 1
import numpy as np
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import test_util
from keras.legacy_tf_layers import convolutional as conv_layers
from keras.legacy_tf_layers import normalization as normalization_layers
Reported by Pylint.
Line: 28
Column: 1
import numpy as np
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import test_util
from keras.legacy_tf_layers import convolutional as conv_layers
from keras.legacy_tf_layers import normalization as normalization_layers
@test_util.run_v1_only('b/120545219')
Reported by Pylint.
Line: 45
Column: 5
use_bias=False,
kernel_initializer=tf.compat.v1.ones_initializer())
bn_layer = normalization_layers.BatchNormalization(fused=fused)
bn_layer._bessels_correction_test_only = False
training = not freeze_mode
bn = bn_layer.apply(conv, training=training)
loss = tf.reduce_sum(tf.abs(bn))
optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.01)
if not freeze_mode:
Reported by Pylint.
Line: 295
Column: 23
self.assertEqual(len(bn.trainable_variables), 2)
self.assertEqual(len(bn.non_trainable_variables), 2)
for var in bn.variables:
self.assertTrue(var.dtype._is_ref_dtype)
# Test that updates were created and added to UPDATE_OPS.
self.assertEqual(len(bn.updates), 2)
self.assertListEqual(
tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS), bn.updates)
Reported by Pylint.
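The expressions flagged at lines 45 and 295 touch underscore-prefixed members (bn_layer._bessels_correction_test_only, var.dtype._is_ref_dtype), which Pylint normally reports as protected-access (W0212); since the messages are not shown, this is an assumption. In test code the usual remediation is a targeted suppression on the flagged lines rather than a refactor, for example:
# Hypothetical fix, assuming the findings are protected-access (W0212):
bn_layer._bessels_correction_test_only = False  # pylint: disable=protected-access

# and on the assertion at line 295:
self.assertTrue(var.dtype._is_ref_dtype)  # pylint: disable=protected-access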
Line: 1
Column: 1
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 23
Column: 1
import tensorflow.compat.v2 as tf
import os
import numpy as np
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import test_util
Reported by Pylint.
Line: 34
Column: 1
@test_util.run_v1_only('b/120545219')
class BNTest(tf.test.TestCase):
def _simple_model(self, image, fused, freeze_mode):
output_channels, kernel_size = 2, 3
conv = conv_layers.conv2d(
image,
Reported by Pylint.
Line: 34
Column: 1
@test_util.run_v1_only('b/120545219')
class BNTest(tf.test.TestCase):
def _simple_model(self, image, fused, freeze_mode):
output_channels, kernel_size = 2, 3
conv = conv_layers.conv2d(
image,
Reported by Pylint.
Line: 36
Column: 3
@test_util.run_v1_only('b/120545219')
class BNTest(tf.test.TestCase):
def _simple_model(self, image, fused, freeze_mode):
output_channels, kernel_size = 2, 3
conv = conv_layers.conv2d(
image,
output_channels,
kernel_size,
Reported by Pylint.
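The findings at lines 34 and 36 (columns 1 and 3) sit on the BNTest class and its _simple_model method. With no message shown, missing docstrings are the most common Pylint complaint at those positions (missing-class-docstring / missing-function-docstring), but that is only an assumption. A sketch of the usual fix, with hypothetical docstring wording:
@test_util.run_v1_only('b/120545219')
class BNTest(tf.test.TestCase):
  """Tests for the legacy tf.layers BatchNormalization wrapper."""  # hypothetical docstring

  def _simple_model(self, image, fused, freeze_mode):
    """Builds a small conv + batch-norm model used by the tests."""  # hypothetical docstring
    output_channels, kernel_size = 2, 3
    ...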
keras/saving/saved_model/saved_model_test.py
1041 issues
Line: 28
Column: 1
import shutil
import sys
from absl.testing import parameterized
import keras
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
Reported by Pylint.
Line: 29
Column: 1
import sys
from absl.testing import parameterized
import keras
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
Reported by Pylint.
Line: 30
Column: 1
from absl.testing import parameterized
import keras
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
Reported by Pylint.
Line: 31
Column: 1
from absl.testing import parameterized
import keras
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
Reported by Pylint.
Line: 32
Column: 1
import keras
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
Reported by Pylint.
Line: 33
Column: 1
from keras import combinations
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
from keras.saving.saved_model import load as keras_load
Reported by Pylint.
Line: 34
Column: 1
from keras import keras_parameterized
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import save_impl as keras_save
Reported by Pylint.
Line: 35
Column: 1
from keras import regularizers
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import save_impl as keras_save
from keras.utils import control_flow_util
Reported by Pylint.
Line: 36
Column: 1
from keras import testing_utils
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import save_impl as keras_save
from keras.utils import control_flow_util
from keras.utils import generic_utils
Reported by Pylint.
Line: 37
Column: 1
from keras.feature_column.dense_features import DenseFeatures
from keras.protobuf import saved_metadata_pb2
from keras.protobuf import versions_pb2
from keras.saving.saved_model import json_utils
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import save_impl as keras_save
from keras.utils import control_flow_util
from keras.utils import generic_utils
from keras.utils import tf_contextlib
Reported by Pylint.
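All ten findings for this file sit at column 1 on the consecutive import lines 28 through 37. The excerpts show the same third-party / local import run flagged in normalization_test.py above, so the regrouping sketched there likely applies here as well; because the report omits the Pylint message IDs, that reading remains an assumption.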
keras/legacy_tf_layers/variable_scope_shim_test.py
1006 issues
Line: 21
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
import gc
import threading
from absl.testing import parameterized
Reported by Pylint.
Line: 26
Column: 1
import gc
import threading
from absl.testing import parameterized
import numpy
from tensorflow.python.framework import test_util
from keras import combinations
from keras import regularizers
from keras.engine import input_layer as input_layer_module
Reported by Pylint.
Line: 28
Column: 1
from absl.testing import parameterized
import numpy
from tensorflow.python.framework import test_util
from keras import combinations
from keras import regularizers
from keras.engine import input_layer as input_layer_module
from keras.engine import training as training_module
from keras.layers import core
Reported by Pylint.
Line: 36
Column: 1
from keras.layers import core
from keras.legacy_tf_layers import core as core_layers
from keras.legacy_tf_layers import variable_scope_shim
from tensorflow.python.ops import variable_scope
def run_inside_wrap_function_in_eager_mode(graph_function):
"""Decorator to execute the same graph code in eager and graph modes.
Reported by Pylint.
Line: 53
Column: 13
decorated function
"""
def wrap_and_execute(self):
store = variable_scope_shim._EagerVariableStore()
with variable_scope.with_variable_store(store):
# use the original function
graph_function(self)
return wrap_and_execute
Reported by Pylint.
Line: 71
Column: 10
@test_util.run_in_graph_and_eager_modes
@run_inside_wrap_function_in_eager_mode
def testGetVar(self):
vs = variable_scope._get_default_variable_store()
v = vs.get_variable("v", [1])
v1 = vs.get_variable("v", [1])
self.assertIs(v, v1)
@test_util.run_in_graph_and_eager_modes
Reported by Pylint.
Line: 79
Column: 10
@test_util.run_in_graph_and_eager_modes
@run_inside_wrap_function_in_eager_mode
def testNameExists(self):
vs = variable_scope._get_default_variable_store()
# No check by default, so we can both create and get existing names.
v = vs.get_variable("v", [1])
v1 = vs.get_variable("v", [1])
self.assertIs(v, v1)
Reported by Pylint.
Line: 90
Column: 10
@test_util.run_in_graph_and_eager_modes
@run_inside_wrap_function_in_eager_mode
def testNamelessStore(self):
vs = variable_scope._get_default_variable_store()
vs.get_variable("v1", [2])
vs.get_variable("v2", [2])
expected_names = ["%s:0" % name for name in ["v1", "v2"]]
self.assertEqual(
set(expected_names), set(v.name for v in vs._vars.values()))
Reported by Pylint.
Line: 95
Column: 50
vs.get_variable("v2", [2])
expected_names = ["%s:0" % name for name in ["v1", "v2"]]
self.assertEqual(
set(expected_names), set(v.name for v in vs._vars.values()))
# TODO(mihaimaruseac): Not converted to use wrap_function because of
# TypeError: Expected tf.group() expected Tensor arguments not 'None' with
# type '<type 'NoneType'>'
@test_util.run_in_graph_and_eager_modes
Reported by Pylint.
Line: 97
Column: 3
self.assertEqual(
set(expected_names), set(v.name for v in vs._vars.values()))
# TODO(mihaimaruseac): Not converted to use wrap_function because of
# TypeError: Expected tf.group() expected Tensor arguments not 'None' with
# type '<type 'NoneType'>'
@test_util.run_in_graph_and_eager_modes
def testVarScopeInitializer(self):
init = tf.compat.v1.constant_initializer(0.3)
Reported by Pylint.
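For this file, the column 10 / column 13 / column 50 findings point at underscore-prefixed internals (variable_scope_shim._EagerVariableStore(), variable_scope._get_default_variable_store(), vs._vars), which matches protected-access, and the column 3 finding at line 97 sits on a TODO comment, which matches Pylint's fixme check; both readings are assumptions since the messages are not shown. The usual in-test remediation is again a targeted disable on the flagged line, e.g.:
vs = variable_scope._get_default_variable_store()  # pylint: disable=protected-access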
keras/layers/convolutional.py
1000 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""Keras convolution layers and image transformation layers."""
# pylint: disable=g-bad-import-order
import tensorflow.compat.v2 as tf
from keras import activations
from keras import backend
from keras import constraints
Reported by Pylint.
Line: 17
Column: 1
# ==============================================================================
"""Keras convolution layers and image transformation layers."""
# pylint: disable=g-bad-import-order
import tensorflow.compat.v2 as tf
from keras import activations
from keras import backend
from keras import constraints
from keras import initializers
Reported by Pylint.
Line: 37
Column: 1
# pylint: enable=unused-import
from keras.utils import conv_utils
from keras.utils import tf_utils
from tensorflow.python.util.tf_export import keras_export
# pylint: disable=g-classes-have-attributes
class Conv(Layer):
"""Abstract N-D convolution layer (private, used as implementation base).
Reported by Pylint.
Line: 38
Column: 1
from keras.utils import conv_utils
from keras.utils import tf_utils
from tensorflow.python.util.tf_export import keras_export
# pylint: disable=g-classes-have-attributes
class Conv(Layer):
"""Abstract N-D convolution layer (private, used as implementation base).
Reported by Pylint.
Line: 123
Column: 16
bias_constraint=None,
trainable=True,
name=None,
conv_op=None,
**kwargs):
super(Conv, self).__init__(
trainable=trainable,
name=name,
activity_regularizer=regularizers.get(activity_regularizer),
Reported by Pylint.
Line: 196
Column: 5
kernel_shape = self.kernel_size + (input_channel // self.groups,
self.filters)
self.kernel = self.add_weight(
name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
Reported by Pylint.
Line: 205
Column: 7
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_weight(
name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
Reported by Pylint.
Line: 214
Column: 7
trainable=True,
dtype=self.dtype)
else:
self.bias = None
channel_axis = self._get_channel_axis()
self.input_spec = InputSpec(min_ndim=self.rank + 2,
axes={channel_axis: input_channel})
self.built = True
Reported by Pylint.
Line: 237
Column: 3
data_format=self._tf_data_format,
name=self.__class__.__name__)
def call(self, inputs):
input_shape = inputs.shape
if self._is_causal: # Apply causal padding to inputs for Conv1D.
inputs = tf.pad(inputs, self._compute_causal_padding(inputs))
Reported by Pylint.
Line: 979
Column: 5
self.input_spec = InputSpec(ndim=3, axes={channel_axis: input_dim})
kernel_shape = self.kernel_size + (self.filters, input_dim)
self.kernel = self.add_weight(
name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
Reported by Pylint.
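In convolutional.py, the findings at lines 196, 205, 214 and 979 land on self.kernel / self.bias assignments inside build(), a pattern Pylint typically reports as attribute-defined-outside-init (W0201); that interpretation is an assumption. A common remediation in Keras-style layers is to predeclare the attributes in the constructor:
# Hypothetical addition inside the existing Conv.__init__, assuming the
# finding is attribute-defined-outside-init (W0201):
self.kernel = None
self.bias = None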
keras/optimizer_v2/optimizer_v2_test.py
976 issues
Line: 17
Column: 1
# ==============================================================================
"""Functional test for OptimizerV2."""
import tensorflow.compat.v2 as tf
import collections
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import collections
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util
from keras import backend
Reported by Pylint.
Line: 24
Column: 1
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
Reported by Pylint.
Line: 25
Column: 1
import numpy as np
import keras
from tensorflow.python.framework import test_util
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
Reported by Pylint.
Line: 26
Column: 1
import keras
from tensorflow.python.framework import test_util
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
Reported by Pylint.
Line: 27
Column: 1
import keras
from tensorflow.python.framework import test_util
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
from keras import testing_utils
Reported by Pylint.
Line: 28
Column: 1
from tensorflow.python.framework import test_util
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
from keras import testing_utils
from keras.engine import input_layer
Reported by Pylint.
Line: 29
Column: 1
from keras import backend
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
from keras import testing_utils
from keras.engine import input_layer
from keras.engine import sequential
Reported by Pylint.
Line: 30
Column: 1
from keras import callbacks
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
from keras import testing_utils
from keras.engine import input_layer
from keras.engine import sequential
from keras.engine import training
Reported by Pylint.
Line: 31
Column: 1
from keras import combinations
from keras import keras_parameterized
from keras import losses
from keras import optimizer_v1
from keras import testing_utils
from keras.engine import input_layer
from keras.engine import sequential
from keras.engine import training
from keras.layers import core
Reported by Pylint.
keras/layers/preprocessing/image_preprocessing_test.py
945 issues
Line: 18
Column: 1
"""Tests for image preprocessing layers."""
import functools
from absl.testing import parameterized
from keras import keras_parameterized
from keras import testing_utils
from keras.engine import sequential
from keras.layers.preprocessing import image_preprocessing
Reported by Pylint.
Line: 25
Column: 1
from keras.engine import sequential
from keras.layers.preprocessing import image_preprocessing
import numpy as np
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
Reported by Pylint.
Line: 26
Column: 1
from keras.layers.preprocessing import image_preprocessing
import numpy as np
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
Reported by Pylint.
Line: 27
Column: 1
import numpy as np
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
Reported by Pylint.
Line: 28
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
Reported by Pylint.
Line: 29
Column: 1
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class ResizingTest(keras_parameterized.TestCase):
Reported by Pylint.
Line: 30
Column: 1
from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import stateless_random_ops
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class ResizingTest(keras_parameterized.TestCase):
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 34
Column: 1
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class ResizingTest(keras_parameterized.TestCase):
def _run_test(self, kwargs, expected_height, expected_width):
np.random.seed(1337)
num_samples = 2
orig_height = 5
Reported by Pylint.
Line: 36
Column: 3
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class ResizingTest(keras_parameterized.TestCase):
def _run_test(self, kwargs, expected_height, expected_width):
np.random.seed(1337)
num_samples = 2
orig_height = 5
orig_width = 8
channels = 3
Reported by Pylint.
keras/metrics.py
918 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
# pylint: disable=g-doc-return-or-yield
"""Built-in metrics."""
import tensorflow.compat.v2 as tf
Reported by Pylint.
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
# pylint: disable=g-doc-return-or-yield
"""Built-in metrics."""
import tensorflow.compat.v2 as tf
import abc
Reported by Pylint.
Line: 19
Column: 1
# pylint: disable=g-doc-return-or-yield
"""Built-in metrics."""
import tensorflow.compat.v2 as tf
import abc
import types
import warnings
Reported by Pylint.
Line: 52
Column: 1
from keras.utils.generic_utils import serialize_keras_object
from keras.utils.generic_utils import to_list
from keras.utils.tf_utils import is_tensor_or_variable
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
_SPARSE_CATEGORICAL_UPDATE_STATE_DOCSTRING = """Accumulates metric statistics.
Reported by Pylint.
Line: 53
Column: 1
from keras.utils.generic_utils import to_list
from keras.utils.tf_utils import is_tensor_or_variable
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
_SPARSE_CATEGORICAL_UPDATE_STATE_DOCSTRING = """Accumulates metric statistics.
For sparse categorical metrics, the shapes of `y_true` and `y_pred` are
Reported by Pylint.
Line: 235
Column: 1
result_t._metric_obj = self # pylint: disable=protected-access
return result_t
from keras.distribute import distributed_training_utils # pylint:disable=g-import-not-at-top
return distributed_training_utils.call_replica_local_fn(
replica_local_fn, *args, **kwargs)
def __str__(self):
args = ','.join(f'{k}={v}' for k, v in self.get_config().items())
Reported by Pylint.
Line: 333
Column: 3
### For use by subclasses ###
@doc_controls.for_subclass_implementers
def add_weight(
self,
name,
shape=(),
aggregation=tf.VariableAggregation.SUM,
synchronization=tf.VariableSynchronization.ON_READ,
Reported by Pylint.
Line: 347
Column: 3
else:
strategy = None
# TODO(b/120571621): Make `ON_READ` work with Keras metrics on TPU.
if backend.is_tpu_strategy(strategy):
synchronization = tf.VariableSynchronization.ON_WRITE
with tf.init_scope():
return super(Metric, self).add_weight(
Reported by Pylint.
Line: 420
Column: 3
self.count = self.add_weight(
'count', initializer='zeros')
def update_state(self, values, sample_weight=None):
"""Accumulates statistics for computing the metric.
Args:
values: Per-example value.
sample_weight: Optional weighting of each example. Defaults to 1.
Reported by Pylint.
Line: 441
Column: 7
if isinstance(values, dict):
msg += ('To return a dict of values, implement a custom Metric '
'subclass.')
raise RuntimeError(msg)
if sample_weight is not None:
sample_weight = tf.cast(sample_weight, self._dtype)
# Update dimensions of weights to match with values if possible.
values, _, sample_weight = losses_utils.squeeze_or_expand_dimensions(
values, sample_weight=sample_weight)
Reported by Pylint.
keras/engine/training_utils_v1.py
891 issues
Line: 17
Column: 1
# ==============================================================================
"""Training-related utilities."""
import tensorflow.compat.v2 as tf
import abc
import atexit
import collections
import functools
Reported by Pylint.
Line: 36
Column: 1
from keras.utils import generic_utils
from keras.utils import losses_utils
from keras.utils import tf_inspect
from tensorflow.python.platform import tf_logging as logging
def is_composite_or_composite_value(tensor):
"""Returns true if 'tensor' is a CompositeTensor or a CT Value object."""
# TODO(b/125094323): This should be isinstance(CompositeTensor) or
Reported by Pylint.
Line: 665
Column: 1
ValueError: In case of invalid user-provided argument.
"""
if x_weight is None or (isinstance(x_weight, (list, tuple)) and
len(x_weight) == 0): # pylint: disable=g-explicit-length-test
return [None for _ in output_names]
if len(output_names) == 1:
if isinstance(x_weight, (list, tuple)) and len(x_weight) == 1:
return x_weight
if isinstance(x_weight, dict) and output_names[0] in x_weight:
Reported by Pylint.
Line: 41
Column: 3
def is_composite_or_composite_value(tensor):
"""Returns true if 'tensor' is a CompositeTensor or a CT Value object."""
# TODO(b/125094323): This should be isinstance(CompositeTensor) or
# isinstance(CompositeTensorValue) once we support that.
return isinstance(
tensor,
(tf.__internal__.CompositeTensor, tf.compat.v1.SparseTensorValue,
tf.compat.v1.ragged.RaggedTensorValue))
Reported by Pylint.
Line: 220
Column: 3
(type(target), type(to_append)))
# Perform type-specific concatenation.
# TODO(b/125094323): This should be replaced by a simple call to
# target.append() that should work on all of the below classes.
# If we're seeing a CompositeTensor here, we know it's because we're in
# Eager mode (or else we'd have evaluated the CT to a CT Value object
# already). Therefore, it's safe to call concat() on it without evaluating
Reported by Pylint.
Line: 256
Column: 3
super(ConcatAggregator, self).__init__(
use_steps=True, num_samples=None, steps=None, batch_size=batch_size)
def create(self, batch_element):
self.composite = is_composite_or_composite_value(batch_element)
def aggregate(self, batch_element, batch_start=None, batch_end=None):
# TODO(psv): Add num_samples check here to detect when output batch
Reported by Pylint.
Line: 259
Column: 3
def create(self, batch_element):
self.composite = is_composite_or_composite_value(batch_element)
def aggregate(self, batch_element, batch_start=None, batch_end=None):
# TODO(psv): Add num_samples check here to detect when output batch
# #samples is < batch size and != input batch #samples.
if self.batch_size and self.batch_size < batch_element.shape[0]:
raise ValueError(
Reported by Pylint.
Line: 261
Column: 3
def aggregate(self, batch_element, batch_start=None, batch_end=None):
# TODO(psv): Add num_samples check here to detect when output batch
# #samples is < batch size and != input batch #samples.
if self.batch_size and self.batch_size < batch_element.shape[0]:
raise ValueError(
'Mismatch between expected batch size and model output batch size. '
'Output shape = {}, expected output shape = shape {}'.format(
Reported by Pylint.
Line: 277
Column: 3
self.results = self.results[0]
elif self.composite:
# TODO(taylorrobie): efficiently concatenate.
results = self.results[0]
for r in self.results[1:]:
results = _append_composite_tensor(results, r)
self.results = results
Reported by Pylint.
Line: 300
Column: 3
Returns:
The global copy threadpool.
"""
global _COPY_POOL
if _COPY_POOL is None:
_COPY_POOL = multiprocessing.pool.ThreadPool(_COPY_THREADS)
atexit.register(_COPY_POOL.close)
return _COPY_POOL
Reported by Pylint.
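Several findings in training_utils_v1.py sit on TODO comments (lines 41, 220, 256, 259, 261, 277), which matches Pylint's fixme warning, and the line 300 finding sits on a global _COPY_POOL statement, which matches global-statement (W0603); both readings are assumptions. If the module-level thread-pool cache is intentional, the usual remediation is a targeted disable on the flagged line:
# Hypothetical fix, assuming the finding is global-statement (W0603):
global _COPY_POOL  # pylint: disable=global-statement
if _COPY_POOL is None:
  _COPY_POOL = multiprocessing.pool.ThreadPool(_COPY_THREADS)
  atexit.register(_COPY_POOL.close)
return _COPY_POOL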
keras/layers/preprocessing/text_vectorization_test.py
882 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras text vectorization preprocessing layer."""
import tensorflow.compat.v2 as tf
import gc
import os
from absl.testing import parameterized
Reported by Pylint.
Line: 22
Column: 1
import gc
import os
from absl.testing import parameterized
import numpy as np
import keras
from keras import backend
from keras import keras_parameterized
Reported by Pylint.
Line: 1275
Column: 1
["ohio", "fire", "earth", "michigan"]])
# pyformat: disable
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0, 0],
[ 1, .4, 0, 0, .6, 0]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 6
Reported by Pylint.
Line: 1278
Column: 1
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0, 0],
[ 1, .4, 0, 0, .6, 0]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 6
expected_output_shape = [None, max_tokens]
input_data = keras.Input(shape=(None,), dtype=tf.string)
Reported by Pylint.
Line: 1306
Column: 1
["ohio", "fire", "earth", "michigan"]])
# pyformat: disable
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0],
[ 1, .4, 0, 0, .6]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 5
Reported by Pylint.
Line: 1309
Column: 1
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0],
[ 1, .4, 0, 0, .6]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 5
expected_output_shape = [None, max_tokens]
input_data = keras.Input(shape=(None,), dtype=tf.string)
Reported by Pylint.
Line: 1336
Column: 1
["ohio", "fire", "earth", "michigan"]])
# pyformat: disable
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0],
[ .2, .4, 0, 0, .6]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 5
Reported by Pylint.
Line: 1339
Column: 1
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0],
[ .2, .4, 0, 0, .6]]
# pylint: enable=bad-whitespace
# pyformat: enable
max_tokens = 5
expected_output_shape = [None, max_tokens]
input_data = keras.Input(shape=(None,), dtype=tf.string)
Reported by Pylint.
Line: 1733
Column: 1
["ohio", "fire", "earth", "michigan"]])
# pyformat: disable
# pylint: disable=bad-whitespace
expected_output = [[ 0, .8, .25, .75, 0],
[ 1, .4, 0, 0, .6]]
vocab_data = ["earth", "wind", "and", "fire"]
# pylint: enable=bad-whitespace
# pyformat: enable
Reported by Pylint.
Line: 1737
Column: 1
expected_output = [[ 0, .8, .25, .75, 0],
[ 1, .4, 0, 0, .6]]
vocab_data = ["earth", "wind", "and", "fire"]
# pylint: enable=bad-whitespace
# pyformat: enable
# Build and validate a golden model.
input_data = keras.Input(shape=(None,), dtype=tf.string)
layer = text_vectorization.TextVectorization(
Reported by Pylint.
keras/engine/data_adapter.py
855 issues
Line: 17
Column: 1
# ==============================================================================
"""Adapter module that convert different input data objects into tf.dataset."""
import tensorflow.compat.v2 as tf
import abc
import contextlib
import functools
import itertools
Reported by Pylint.
Line: 27
Column: 1
import random
import numpy as np
from tensorflow.python.eager import context
from keras import backend
from keras.engine import training_utils
from keras.utils import data_utils
from keras.utils import dataset_creator
from keras.utils import tf_utils
Reported by Pylint.
Line: 33
Column: 1
from keras.utils import data_utils
from keras.utils import dataset_creator
from keras.utils import tf_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
try:
import pandas as pd # pylint: disable=g-import-not-at-top
except ImportError:
Reported by Pylint.
Line: 34
Column: 1
from keras.utils import dataset_creator
from keras.utils import tf_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
try:
import pandas as pd # pylint: disable=g-import-not-at-top
except ImportError:
pd = None
Reported by Pylint.
Line: 37
Column: 1
from tensorflow.python.util.tf_export import keras_export
try:
import pandas as pd # pylint: disable=g-import-not-at-top
except ImportError:
pd = None
keras_data_adapter_gauge = tf.__internal__.monitoring.BoolGauge(
"/tensorflow/api/keras/data_adapters", "keras data adapter usage", "method")
Reported by Pylint.
Line: 1665
Column: 1
def _is_scipy_sparse(x):
try:
from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
return issparse(x)
except ImportError:
return False
Reported by Pylint.
Line: 201
Column: 5
def on_epoch_end(self):
"""A hook called after each epoch."""
pass
class TensorLikeDataAdapter(DataAdapter):
"""Adapter that handles Tensor-like objects, e.g. EagerTensor and NumPy."""
Reported by Pylint.
Line: 209
Column: 3
@staticmethod
def can_handle(x, y=None):
# TODO(kaftan): Check performance implications of using a flatten
# here for other types of inputs.
flat_inputs = tf.nest.flatten(x)
if y is not None:
flat_inputs += tf.nest.flatten(y)
Reported by Pylint.
Line: 987
Column: 3
"""Selects a data adapter than can handle a given x and y."""
adapter_cls = [cls for cls in ALL_ADAPTER_CLS if cls.can_handle(x, y)]
if not adapter_cls:
# TODO(scottzhu): This should be a less implementation-specific error.
raise ValueError(
"Failed to find data adapter that can handle "
"input: {}, {}".format(
_type_name(x), _type_name(y)))
elif len(adapter_cls) > 1:
Reported by Pylint.
Line: 1089
Column: 9
"sample_weight modes were coerced from\n {}\n to \n {}"
.format(target_str, mode_str))
except (ValueError, TypeError):
raise ValueError(
"Unable to match target structure and sample_weight_modes "
"structure:\n {}\n to \n {}".format(target_str, mode_str))
return sample_weight_modes
Reported by Pylint.
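Finally, two data_adapter.py findings have likely readings, though both are assumptions since the report omits the message IDs: the line 201 finding sits on a pass statement that follows a docstring, consistent with unnecessary-pass (W0107), where deleting the pass is enough; and the line 1089 finding (column 9) lands on a bare raise ValueError(...) inside an except (ValueError, TypeError): block, which recent Pylint versions report as raise-missing-from (W0707). A sketch of the conventional fix for the latter, chaining the original exception:
# Hypothetical fix, assuming the finding is raise-missing-from (W0707):
except (ValueError, TypeError) as e:
  raise ValueError(
      "Unable to match target structure and sample_weight_modes "
      "structure:\n {}\n to \n {}".format(target_str, mode_str)) from e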