The following issues were found:
keras/mixed_precision/autocast_variable.py
342 issues
Line: 17
Column: 1
# ==============================================================================
"""Contains AutoCastVariable, a variable which automatically casts itself."""
import tensorflow.compat.v2 as tf
import threading
from keras.distribute import distributed_training_utils
Reported by Pylint.
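The message text is not included in this report, but the snippet places the standard-library import threading after the third-party import tensorflow.compat.v2 as tf, an ordering Pylint usually reports as wrong-import-order (C0411). If that is the warning here, the conventional grouping would look like the sketch below (illustrative only, not the file's actual contents):
# Standard library first, third-party second, first-party last.
import threading

import tensorflow.compat.v2 as tf

from keras.distribute import distributed_training_utils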
Line: 150
Column: 5
  def _should_act_as_resource_variable(self):
    """Pass resource_variable_ops.is_resource_variable check."""
    pass
  def __repr__(self):
    if tf.executing_eagerly() and not self._in_graph_mode:
      repr_str = ("<AutoCastVariable '{v.name}' shape={v.shape} "
                  'dtype={v.dtype.name} dtype_to_cast_to={v._cast_dtype.name}, '
Reported by Pylint.
Line: 213
Column: 3
                           use_locking=None,
                           name=None,
                           read_value=True):
    # TODO(b/146181571): This logic can be simplified once
    # DistributedVariable.assign returns a DistributedVariable. Currently for
    # MirroredStrategy, it returns a Mirrored value.
    if tf.compat.v1.executing_eagerly_outside_functions():
      assign_op = update_fn(value, use_locking, name, False)
      if read_value:
Reported by Pylint.
Line: 355
Column: 3
    obj_map[self] = obj_map[self._variable]
    return obj_map, resource_map
  # TODO(reedwm): Maybe encode the fact the variable is an AutoCastVariable in
  # to_proto().
  def to_proto(self, export_scope=None):
    return self._variable.to_proto(export_scope)
  def from_proto(self, variable_def, import_scope=None):
Reported by Pylint.
Line: 369
Column: 3
  # https://github.com/tensorflow/tensorflow/blob/db26bd574fa95b5bdd53c08463dd19407cc0297e/tensorflow/python/keras/saving/saved_model/load.py#L211
  # We need to expose these attributes on AutoCastVariable as well for
  # SavedModel to work properly.
  # TODO(reedwm/kathywu): Find a better way to support SavedModel. Exposing
  # private attributes is hacky and difficult to maintain.
  @property
  def _handle_name(self):
    return self._variable._handle_name  # pylint: disable=protected-access
Reported by Pylint.
Line: 544
Column: 5
    self._dtype = dtype
  def __enter__(self):
    self._prev_dtype = getattr(_autocast_dtype, 'dtype', None)
    _autocast_dtype.dtype = self._dtype
  def __exit__(self, type_arg, value_arg, traceback_arg):
    _autocast_dtype.dtype = self._prev_dtype
Reported by Pylint.
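The __enter__/__exit__ pair in the snippet above records the previous value of a thread-local dtype attribute and restores it on exit. A self-contained sketch of that save-and-restore pattern, using generic names rather than the actual Keras class:
import threading

_autocast_dtype = threading.local()  # per-thread storage, as in the snippet

class _DtypeScope:  # hypothetical name; illustrates the pattern only
  def __init__(self, dtype):
    self._dtype = dtype

  def __enter__(self):
    # Remember the previous per-thread value so nested scopes restore cleanly.
    self._prev_dtype = getattr(_autocast_dtype, 'dtype', None)
    _autocast_dtype.dtype = self._dtype

  def __exit__(self, type_arg, value_arg, traceback_arg):
    _autocast_dtype.dtype = self._prev_dtype

with _DtypeScope('float16'):
  print(_autocast_dtype.dtype)  # 'float16' inside the scope, restored afterwards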
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import threading
from keras.distribute import distributed_training_utils
# _autocast_dtype.dtype is the dtype AutoCastVariables should be cast to, or
# None if AutoCastVariables should not be cast.
Reported by Pylint.
Line: 29
Column: 1
def numpy_text(tensor, is_repr=False):
  """Human readable representation of a tensor's numpy value."""
  if tensor.dtype.is_numpy_compatible:
    # pylint: disable=protected-access
    text = repr(tensor._numpy()) if is_repr else str(tensor._numpy())
    # pylint: enable=protected-access
  else:
Reported by Pylint.
Line: 30
Column: 1
def numpy_text(tensor, is_repr=False):
  """Human readable representation of a tensor's numpy value."""
  if tensor.dtype.is_numpy_compatible:
    # pylint: disable=protected-access
    text = repr(tensor._numpy()) if is_repr else str(tensor._numpy())
    # pylint: enable=protected-access
  else:
    text = '<unprintable>'
Reported by Pylint.
Line: 32
Column: 1
"""Human readable representation of a tensor's numpy value."""
if tensor.dtype.is_numpy_compatible:
# pylint: disable=protected-access
text = repr(tensor._numpy()) if is_repr else str(tensor._numpy())
# pylint: enable=protected-access
else:
text = '<unprintable>'
if '\n' in text:
text = '\n' + text
Reported by Pylint.
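For context, the numpy_text helper quoted in the three entries above falls back to '<unprintable>' when the dtype has no NumPy representation and prefixes multi-line output with a newline. A rough stand-alone analogue of that formatting logic (plain NumPy, no TensorFlow required; not the Keras function itself):
import numpy as np

def numpy_text_demo(array, is_repr=False):
  # Mirrors the quoted helper: repr() for __repr__, str() otherwise,
  # and a leading newline when the rendering spans multiple lines.
  text = repr(array) if is_repr else str(array)
  if '\n' in text:
    text = '\n' + text
  return text

print(numpy_text_demo(np.arange(6).reshape(2, 3)))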
keras/engine/training_distributed_v1.py
339 issues
Line: 17
Column: 1
# ==============================================================================
"""Part of the Keras training engine related to distributed training."""
import tensorflow.compat.v2 as tf
# pylint: disable=protected-access
import numpy as np
from tensorflow.python.distribute import input_lib
from keras import backend
Reported by Pylint.
Line: 21
Column: 1
# pylint: disable=protected-access
import numpy as np
from tensorflow.python.distribute import input_lib
from keras import backend
from keras import callbacks as cbks
from keras.distribute import distribute_coordinator_utils as dc
from keras.distribute import distributed_training_utils_v1 as dist_utils
from keras.engine import partial_batch_padding_handler as padding_util
Reported by Pylint.
Line: 31
Column: 1
from keras.engine import training_utils_v1
from keras.utils.generic_utils import Progbar
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
def _per_replica_execution_function(model, mode):
  exec_func = model._make_execution_function(mode)
  return (exec_func.inputs, exec_func.outputs, exec_func.updates_op,
Reported by Pylint.
Line: 168
Column: 3
  scope = dist_utils.distributed_scope(
      strategy=current_strategy, learning_phase=1)
  scope.__enter__()
  out_labels = model.metrics_names or []
  step_fn = _make_train_step_fn(model, ModeKeys.TRAIN, current_strategy,
                                out_labels)
Reported by Pylint.
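Several entries in this file flag lines around the manual scope.__enter__() and scope.__exit__(None, None, None) calls. These loops deliberately keep the distribution scope open across helper functions, so a with block is not a drop-in replacement here; for reference only, the pairing that a with statement performs automatically is sketched below (generic code, not Keras):
import contextlib

@contextlib.contextmanager
def distributed_scope():  # stand-in for dist_utils.distributed_scope(...)
  print('enter scope')
  try:
    yield
  finally:
    print('exit scope')

# The with statement calls __enter__ on entry and __exit__ on every exit path.
with distributed_scope():
  print('training steps would run here')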
Line: 278
Column: 3
  if model._compile_distribution:
    # Copy the weights back from the replicated model to the original model.
    dist_utils._copy_weights_to_original_model(model, ModeKeys.TRAIN)
  scope.__exit__(None, None, None)
  return model.history
def experimental_tpu_test_loop(model,
                               dataset,
Reported by Pylint.
Line: 310
Column: 3
  scope = dist_utils.distributed_scope(
      strategy=current_strategy, learning_phase=0)
  scope.__enter__()
  out_labels = model.metrics_names
  def _test_step_fn(inputs):
    """A fn that returns output of single test step."""
Reported by Pylint.
Line: 404
Column: 3
  progbar.update(target_steps)
  callbacks._call_end_hook(mode)
  scope.__exit__(None, None, None)
  if len(outs) >= 0:
    outs[0] /= (target_steps)
  if len(outs) == 1:
    return outs[0]
Reported by Pylint.
Line: 463
Column: 3
  scope = dist_utils.distributed_scope(
      strategy=current_strategy, learning_phase=0)
  scope.__enter__()
  def _predict_step_fn(inputs):
    """A fn that returns output of single prediction step."""
    (tf.distribute.get_replica_context().merge_call(
Reported by Pylint.
Line: 553
Column: 3
  callbacks._call_end_hook(mode)
  scope.__exit__(None, None, None)
  if len(unconcatenated_outs) == 1:
    prediction_result = np.concatenate(unconcatenated_outs[0], axis=0)
  else:
    prediction_result = [
Reported by Pylint.
Line: 104
Column: 3
        reduce_op = tf.distribute.ReduceOp.MEAN
      ctx.set_last_step_output(label, output, reduce_op)
    # TODO(priyag, sourabhbajaj): Ignoring these things from the combined_fn:
    # feed_dict, session kwargs, run options, run_metadata for now. These should
    # be handled appropriately
    return combined_fn.updates_op
  return _step_fn
Reported by Pylint.
keras/saving/saved_model/save_impl.py
331 issues
Line: 25
Column: 1
import threading
import weakref
from keras import backend as K
from keras.engine import base_layer_utils
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
Reported by Pylint.
Line: 26
Column: 1
import weakref
from keras import backend as K
from keras.engine import base_layer_utils
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
Reported by Pylint.
Line: 27
Column: 1
from keras import backend as K
from keras.engine import base_layer_utils
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
Reported by Pylint.
Line: 28
Column: 1
from keras import backend as K
from keras.engine import base_layer_utils
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
Reported by Pylint.
Line: 29
Column: 1
from keras.engine import base_layer_utils
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
Reported by Pylint.
Line: 30
Column: 1
from keras.engine import input_spec
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
Reported by Pylint.
Line: 31
Column: 1
from keras.mixed_precision import autocast_variable
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
from keras.utils import tf_utils
Reported by Pylint.
Line: 32
Column: 1
from keras.saving import saving_utils
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
from keras.utils import tf_utils
from keras.utils import version_utils
Reported by Pylint.
Line: 33
Column: 1
from keras.saving.saved_model import constants
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
from keras.utils import tf_utils
from keras.utils import version_utils
from keras.utils.generic_utils import LazyLoader
Reported by Pylint.
Line: 34
Column: 1
from keras.saving.saved_model import load as keras_load
from keras.saving.saved_model import serialized_attributes
from keras.saving.saved_model import utils
from keras.utils import tf_contextlib
from keras.utils import tf_inspect
from keras.utils import tf_utils
from keras.utils import version_utils
from keras.utils.generic_utils import LazyLoader
import tensorflow.compat.v1.logging as logging
Reported by Pylint.
keras/engine/training_utils_v1_test.py
329 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for training utility functions."""
import tensorflow.compat.v2 as tf
import functools
import multiprocessing.pool
import time
Reported by Pylint.
Line: 23
Column: 1
import multiprocessing.pool
import time
from absl.testing import parameterized
import numpy as np
from keras import backend
from keras import keras_parameterized
from keras import testing_utils
from keras.engine import keras_tensor
Reported by Pylint.
Line: 30
Column: 1
from keras import testing_utils
from keras.engine import keras_tensor
from keras.engine import training_utils_v1
from tensorflow.python.platform import tf_logging as logging
class ModelInputsTest(tf.test.TestCase):
  def test_single_thing(self):
Reported by Pylint.
Line: 99
Column: 1
class DatasetUtilsTest(tf.test.TestCase, parameterized.TestCase):
  @parameterized.named_parameters(
      # pylint: disable=g-long-lambda
      ('Batch', lambda: tf.data.Dataset.range(5).batch(2)),
      ('Cache', lambda: tf.data.Dataset.range(5).cache()),
      ('Concatenate', lambda: tf.data.Dataset.range(5).concatenate(
          tf.data.Dataset.range(5))),
      ('FlatMap', lambda: tf.data.Dataset.range(5).flat_map(
Reported by Pylint.
Line: 140
Column: 1
      ('TFRecordDataset', lambda: tf.data.TFRecordDataset([])),
      ('Window', lambda: tf.data.Dataset.range(5).window(2)),
      ('Zip', lambda: tf.data.Dataset.zip(tf.data.Dataset.range(5))),
      # pylint: enable=g-long-lambda
  )
  def test_verify_dataset_shuffled(self, dataset_fn, expect_shuffled=False):
    dataset = dataset_fn()
    if not expect_shuffled:
Reported by Pylint.
Line: 199
Column: 1
          class_weight=class_weight)
class MonitoredPool(multiprocessing.pool.ThreadPool):
  def __init__(self, *args, **kwargs):
    self._apply_counter = 0
    self._func_wrapper = None
    super(MonitoredPool, self).__init__(*args, **kwargs)
Reported by Pylint.
Line: 242
Column: 22
  def setUp(self):
    super(AggregationTest, self).setUp()
    self._old_pool = training_utils_v1._COPY_POOL
    self._old_threshold = (
        training_utils_v1.SliceAggregator._BINARY_SIZE_THRESHOLD)
    self._old_timeout = training_utils_v1.SliceAggregator._MAX_COPY_SECONDS
    training_utils_v1._COPY_POOL = MonitoredPool(
        training_utils_v1._COPY_THREADS)
Reported by Pylint.
Line: 244
Column: 9
    super(AggregationTest, self).setUp()
    self._old_pool = training_utils_v1._COPY_POOL
    self._old_threshold = (
        training_utils_v1.SliceAggregator._BINARY_SIZE_THRESHOLD)
    self._old_timeout = training_utils_v1.SliceAggregator._MAX_COPY_SECONDS
    training_utils_v1._COPY_POOL = MonitoredPool(
        training_utils_v1._COPY_THREADS)
  def tearDown(self):
Reported by Pylint.
Line: 245
Column: 25
    self._old_pool = training_utils_v1._COPY_POOL
    self._old_threshold = (
        training_utils_v1.SliceAggregator._BINARY_SIZE_THRESHOLD)
    self._old_timeout = training_utils_v1.SliceAggregator._MAX_COPY_SECONDS
    training_utils_v1._COPY_POOL = MonitoredPool(
        training_utils_v1._COPY_THREADS)
  def tearDown(self):
    super(AggregationTest, self).tearDown()
Reported by Pylint.
Line: 246
Column: 5
    self._old_threshold = (
        training_utils_v1.SliceAggregator._BINARY_SIZE_THRESHOLD)
    self._old_timeout = training_utils_v1.SliceAggregator._MAX_COPY_SECONDS
    training_utils_v1._COPY_POOL = MonitoredPool(
        training_utils_v1._COPY_THREADS)
  def tearDown(self):
    super(AggregationTest, self).tearDown()
    training_utils_v1._COPY_POOL = self._old_pool
Reported by Pylint.
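The AggregationTest snippets above save module-level private attributes (training_utils_v1._COPY_POOL and the SliceAggregator thresholds) in setUp and restore them in tearDown, which is presumably why the warnings land on those lines. A generic, self-contained sketch of that save-patch-restore pattern with illustrative names, not the Keras test itself:
import unittest

class _FakeUtilsModule:  # stand-in for the module whose private global is patched
  _COPY_POOL = 'real pool'

class PatchingTest(unittest.TestCase):

  def setUp(self):
    super().setUp()
    self._old_pool = _FakeUtilsModule._COPY_POOL    # remember the original
    _FakeUtilsModule._COPY_POOL = 'monitored pool'  # install the test double

  def tearDown(self):
    super().tearDown()
    _FakeUtilsModule._COPY_POOL = self._old_pool    # undo the patch

  def test_uses_patched_pool(self):
    self.assertEqual(_FakeUtilsModule._COPY_POOL, 'monitored pool')

if __name__ == '__main__':
  unittest.main()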
keras/layers/lstm_test.py
327 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for LSTM layer."""
import tensorflow.compat.v2 as tf
import copy
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import copy
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
Reported by Pylint.
Line: 204
Column: 22
    output = layer(inputs, initial_state=initial_state)
    self.assertTrue(
        any(initial_state[0] is t
            for t in layer._inbound_nodes[0].input_tensors))
    model = keras.models.Model([inputs] + initial_state, output)
    model.compile(
        loss='categorical_crossentropy',
        optimizer=tf.compat.v1.train.AdamOptimizer(),
Reported by Pylint.
Line: 348
Column: 22
    output = layer(inputs)
    self.assertTrue(
        any(initial_state[0] is t
            for t in layer._inbound_nodes[0].input_tensors))
    model = keras.models.Model(inputs, output)
    model.compile(
        loss='categorical_crossentropy',
        optimizer=tf.compat.v1.train.AdamOptimizer(),
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import copy
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 30
Column: 1
@keras_parameterized.run_all_keras_modes
class LSTMLayerTest(keras_parameterized.TestCase):
  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
Reported by Pylint.
Line: 30
Column: 1
@keras_parameterized.run_all_keras_modes
class LSTMLayerTest(keras_parameterized.TestCase):
  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
Reported by Pylint.
Line: 32
Column: 3
@keras_parameterized.run_all_keras_modes
class LSTMLayerTest(keras_parameterized.TestCase):
  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
Reported by Pylint.
Line: 32
Column: 1
@keras_parameterized.run_all_keras_modes
class LSTMLayerTest(keras_parameterized.TestCase):
  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
Reported by Pylint.
Line: 32
Column: 3
@keras_parameterized.run_all_keras_modes
class LSTMLayerTest(keras_parameterized.TestCase):
  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
Reported by Pylint.
keras/utils/metrics_utils.py
326 issues
Line: 18
Column: 1
# pylint: disable=protected-access
"""Utils related to keras metrics."""
import tensorflow.compat.v2 as tf
import functools
import weakref
from enum import Enum
Reported by Pylint.
Line: 72
Column: 5
          'the metric was not created in TPUStrategy scope. '
          'Make sure the keras Metric is created in TPUstrategy scope. ')
    with tf_utils.graph_context_for_symbolic_tensors(*args, **kwargs):
      update_op = update_state_fn(*args, **kwargs)
    if update_op is not None:  # update_op will be None in eager execution.
      metric_obj.add_update(update_op)
    return update_op
Reported by Pylint.
Line: 844
Column: 1
    if len(splits_list) != len(nested_splits_lists[0]):
      raise ValueError(error_msg)
  return [
      tf.compat.v1.assert_equal(s1, s2, message=error_msg)  # pylint: disable=g-complex-comprehension
      for splits_list in nested_splits_lists[1:]
      for (s1, s2) in zip(nested_splits_lists[0], splits_list)
  ]
Reported by Pylint.
Line: 147
Column: 13
      try:
        result_t = tf.identity(raw_result)
      except (ValueError, TypeError):
        raise RuntimeError(
            'The output of `metric.result()` can only be a single '
            'Tensor/Variable, or a dict of Tensors/Variables. '
            f'For metric {metric_obj.name}, got result {raw_result}.')
    else:
      # TODO(psv): Test distribution of metrics using different distribution
Reported by Pylint.
Line: 152
Column: 3
            'Tensor/Variable, or a dict of Tensors/Variables. '
            f'For metric {metric_obj.name}, got result {raw_result}.')
    else:
      # TODO(psv): Test distribution of metrics using different distribution
      # strategies.
      # Creating a wrapper for merge_fn. merge_call invokes the given merge_fn
      # with distribution object as the first parameter. We create a wrapper
      # here so that the result function need not have that parameter.
Reported by Pylint.
Line: 622
Column: 7
        y_pred, y_true)
  else:
    sample_weight = tf.cast(sample_weight, dtype=variable_dtype)
    y_pred, y_true, sample_weight = (
        losses_utils.squeeze_or_expand_dimensions(
            y_pred, y_true, sample_weight=sample_weight))
  y_pred.shape.assert_is_compatible_with(y_true.shape)
  if top_k is not None:
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import functools
import weakref
from enum import Enum
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import tensorflow.compat.v2 as tf
import functools
import weakref
from enum import Enum
import numpy as np
from keras import backend
Reported by Pylint.
Line: 23
Column: 1
import functools
import weakref
from enum import Enum
import numpy as np
from keras import backend
from keras.utils import losses_utils
from keras.utils import tf_utils
Reported by Pylint.
Line: 35
Column: 1
class Reduction(Enum):
  """Types of metrics reduction.
  Contains the following values:
  * `SUM`: Scalar sum of weighted values.
  * `SUM_OVER_BATCH_SIZE`: Scalar sum of weighted values divided by
Reported by Pylint.
keras/tests/model_subclassing_compiled_test.py
326 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for compiled Model subclassing."""
import tensorflow.compat.v2 as tf
import os
import numpy as np
Reported by Pylint.
Line: 23
Column: 1
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.tests import model_subclassing_test_util as model_util
try:
Reported by Pylint.
Line: 24
Column: 1
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.tests import model_subclassing_test_util as model_util
try:
  import h5py  # pylint:disable=g-import-not-at-top
Reported by Pylint.
Line: 25
Column: 1
import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.tests import model_subclassing_test_util as model_util
try:
  import h5py  # pylint:disable=g-import-not-at-top
except ImportError:
Reported by Pylint.
Line: 26
Column: 1
import keras
from keras import keras_parameterized
from keras import testing_utils
from keras.tests import model_subclassing_test_util as model_util
try:
  import h5py  # pylint:disable=g-import-not-at-top
except ImportError:
  h5py = None
Reported by Pylint.
Line: 29
Column: 1
from keras.tests import model_subclassing_test_util as model_util
try:
  import h5py  # pylint:disable=g-import-not-at-top
except ImportError:
  h5py = None
@keras_parameterized.run_all_keras_modes
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import os
import numpy as np
import keras
from keras import keras_parameterized
Reported by Pylint.
Line: 29
Column: 1
from keras.tests import model_subclassing_test_util as model_util
try:
  import h5py  # pylint:disable=g-import-not-at-top
except ImportError:
  h5py = None
@keras_parameterized.run_all_keras_modes
Reported by Pylint.
Line: 31
Column: 1
try:
  import h5py  # pylint:disable=g-import-not-at-top
except ImportError:
  h5py = None
@keras_parameterized.run_all_keras_modes
class ModelSubclassCompiledTest(keras_parameterized.TestCase):
Reported by Pylint.
Line: 35
Column: 1
@keras_parameterized.run_all_keras_modes
class ModelSubclassCompiledTest(keras_parameterized.TestCase):
  def test_single_io_workflow_with_np_arrays(self):
    num_classes = 2
    num_samples = 100
    input_dim = 50
Reported by Pylint.
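The repeated try/except ImportError entries above show the optional-dependency guard used by this test module: h5py is set to None when it cannot be imported, and HDF5-dependent code checks the sentinel first. A minimal sketch of how such a guard is typically consumed (hypothetical test, not from the file):
import unittest

try:
  import h5py  # optional dependency, as in the snippet above
except ImportError:
  h5py = None

class OptionalH5pyTest(unittest.TestCase):

  def test_needs_h5py(self):
    if h5py is None:
      self.skipTest('h5py is not installed')
    self.assertTrue(hasattr(h5py, 'File'))

if __name__ == '__main__':
  unittest.main()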
keras/engine/base_layer_utils.py
325 issues
Line: 17
Column: 1
# ==============================================================================
"""Contains private utilities used mainly by the base Layer class."""
import tensorflow.compat.v2 as tf
import functools
import threading
from keras import backend
from keras.utils import control_flow_util
Reported by Pylint.
Line: 25
Column: 1
from keras.utils import control_flow_util
from keras.utils import tf_inspect
from keras.utils import tf_utils
from tensorflow.python.util.tf_export import keras_export
_call_context = threading.local()
def create_mean_metric(value, name=None):
Reported by Pylint.
Line: 33
Column: 1
def create_mean_metric(value, name=None):
  # import keras will import base_layer and then this module, and metric relies
  # on base_layer, which result into a cyclic dependency.
  from keras import metrics as metrics_module  # pylint: disable=g-import-not-at-top
  metric_obj = metrics_module.Mean(name=name, dtype=value.dtype)
  return metric_obj, metric_obj(value)
def make_variable(name,
Reported by Pylint.
Line: 212
Column: 1
  # Import of `base_layer` needed in order to create `TensorFlowOpLayer`.
  # Cannot be imported at top because of circular dependencies.
  # TODO(omalleyt): Resolve circular dependency.
  from keras.engine import base_layer  # pylint: disable=g-import-not-at-top
  tensor_list = tf.nest.flatten(tensors)
  sparse_ops = []
  ragged_tensors = []
  for tensor in tensor_list:
    if getattr(tensor, '_keras_history', None) is not None:
Reported by Pylint.
Line: 211
Column: 3
      '`create_keras_history` should only be called if eager is disabled!')
  # Import of `base_layer` needed in order to create `TensorFlowOpLayer`.
  # Cannot be imported at top because of circular dependencies.
  # TODO(omalleyt): Resolve circular dependency.
  from keras.engine import base_layer  # pylint: disable=g-import-not-at-top
  tensor_list = tf.nest.flatten(tensors)
  sparse_ops = []
  ragged_tensors = []
  for tensor in tensor_list:
Reported by Pylint.
Line: 452
Column: 3
        'training': None,
        'saving': None
    }
    # TODO(b/150169018): This logic can be replaced after the Functional API
    # refactor.
    self._in_keras_graph = False
  def enter(self, layer, inputs, build_graph, training, saving=None):
    """Push a Layer and its inputs and state onto the current call context.
Reported by Pylint.
Line: 525
Column: 5
  def __enter__(self):
    call_ctx = self._call_ctx
    self._prev_in_call = call_ctx.in_call
    self._prev_state = call_ctx._state
    call_ctx.in_call = True
    call_ctx._state = self._state
Reported by Pylint.
Line: 526
Column: 5
  def __enter__(self):
    call_ctx = self._call_ctx
    self._prev_in_call = call_ctx.in_call
    self._prev_state = call_ctx._state
    call_ctx.in_call = True
    call_ctx._state = self._state
    # TODO(b/150169018): This logic can be removed after the Functional API
Reported by Pylint.
Line: 531
Column: 3
    call_ctx.in_call = True
    call_ctx._state = self._state
    # TODO(b/150169018): This logic can be removed after the Functional API
    # refactor.
    if self._build_graph:
      self._prev_in_keras_graph = call_ctx._in_keras_graph
      call_ctx._in_keras_graph = (
          call_ctx._in_keras_graph or
Reported by Pylint.
Line: 534
Column: 7
    # TODO(b/150169018): This logic can be removed after the Functional API
    # refactor.
    if self._build_graph:
      self._prev_in_keras_graph = call_ctx._in_keras_graph
      call_ctx._in_keras_graph = (
          call_ctx._in_keras_graph or
          getattr(backend.get_graph(), 'name', None) == 'keras_graph')
  def __exit__(self, *exc_info):
Reported by Pylint.
keras/layers/local_test.py
321 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for locally-connected layers."""
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util as tf_test_util
from keras import combinations
Reported by Pylint.
Line: 24
Column: 1
import numpy as np
import keras
from tensorflow.python.framework import test_util as tf_test_util
from keras import combinations
from keras import testing_utils
from keras.optimizer_v2 import rmsprop
from tensorflow.python.training.rmsprop import RMSPropOptimizer
Reported by Pylint.
Line: 28
Column: 1
from keras import combinations
from keras import testing_utils
from keras.optimizer_v2 import rmsprop
from tensorflow.python.training.rmsprop import RMSPropOptimizer
_DATA_FORMAT_PADDING_IMPLEMENTATION = [{
    'data_format': 'channels_first',
    'padding': 'valid',
Reported by Pylint.
Line: 544
Column: 28
    for _ in range(layers):
      model.add(lc_layer(
          padding='valid',
          kernel_initializer=keras.initializers.random_normal(),
          bias_initializer=keras.initializers.random_normal(),
          filters=filters,
          strides=strides,
          kernel_size=kernel_size,
          activation=keras.activations.relu,
Reported by Pylint.
Line: 545
Column: 26
      model.add(lc_layer(
          padding='valid',
          kernel_initializer=keras.initializers.random_normal(),
          bias_initializer=keras.initializers.random_normal(),
          filters=filters,
          strides=strides,
          kernel_size=kernel_size,
          activation=keras.activations.relu,
          data_format=data_format,
Reported by Pylint.
Line: 578
Column: 32
      model.add(
          lc_layer(
              padding='valid',
              kernel_initializer=keras.initializers.random_normal(),
              bias_initializer=keras.initializers.random_normal(),
              filters=filters,
              strides=strides,
              kernel_size=kernel_size,
              activation=keras.activations.relu,
Reported by Pylint.
Line: 579
Column: 30
          lc_layer(
              padding='valid',
              kernel_initializer=keras.initializers.random_normal(),
              bias_initializer=keras.initializers.random_normal(),
              filters=filters,
              strides=strides,
              kernel_size=kernel_size,
              activation=keras.activations.relu,
              data_format=data_format,
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import os
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util as tf_test_util
Reported by Pylint.
Line: 28
Column: 1
from keras import combinations
from keras import testing_utils
from keras.optimizer_v2 import rmsprop
from tensorflow.python.training.rmsprop import RMSPropOptimizer
_DATA_FORMAT_PADDING_IMPLEMENTATION = [{
    'data_format': 'channels_first',
    'padding': 'valid',
Reported by Pylint.
keras/losses.py
316 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-classes-have-attributes
"""Built-in loss functions."""
import tensorflow.compat.v2 as tf
import abc
Reported by Pylint.
Line: 18
Column: 1
# pylint: disable=g-classes-have-attributes
"""Built-in loss functions."""
import tensorflow.compat.v2 as tf
import abc
import functools
from keras import backend
from keras.utils import losses_utils
Reported by Pylint.
Line: 27
Column: 1
from keras.utils import tf_utils
from keras.utils.generic_utils import deserialize_keras_object
from keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_util
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
Reported by Pylint.
Line: 28
Column: 1
from keras.utils.generic_utils import deserialize_keras_object
from keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_util
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
Reported by Pylint.
Line: 29
Column: 1
from keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_util
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
@keras_export('keras.losses.Loss')
Reported by Pylint.
Line: 30
Column: 1
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_util
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
@keras_export('keras.losses.Loss')
class Loss:
Reported by Pylint.
Line: 31
Column: 1
from tensorflow.python.ops.ragged import ragged_util
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
@keras_export('keras.losses.Loss')
class Loss:
"""Loss base class.
Reported by Pylint.
Line: 136
Column: 5
    # accepted in scope name.
    graph_ctx = tf_utils.graph_context_for_symbolic_tensors(
        y_true, y_pred, sample_weight)
    with backend.name_scope(self._name_scope), graph_ctx:
      if tf.executing_eagerly():
        call_fn = self.call
      else:
        call_fn = tf.__internal__.autograph.tf_convert(self.call, tf.__internal__.autograph.control_status_ctx())
      losses = call_fn(y_true, y_pred)
Reported by Pylint.
Line: 2059
Column: 15
def is_categorical_crossentropy(loss):
  result = ((isinstance(loss, CategoricalCrossentropy) or
             (isinstance(loss, LossFunctionWrapper) and
              loss.fn == categorical_crossentropy) or
             (hasattr(loss, '__name__') and
              loss.__name__ == 'categorical_crossentropy') or
             (loss == 'categorical_crossentropy')))
  return result
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.