The following issues were found:
keras/callbacks.py
1349 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
# pylint: disable=g-classes-have-attributes
"""Callbacks: utilities called at certain points during model training."""
import tensorflow.compat.v2 as tf
Reported by Pylint.
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
# pylint: disable=g-classes-have-attributes
"""Callbacks: utilities called at certain points during model training."""
import tensorflow.compat.v2 as tf
import collections
Reported by Pylint.
Line: 19
Column: 1
# pylint: disable=g-classes-have-attributes
"""Callbacks: utilities called at certain points during model training."""
import tensorflow.compat.v2 as tf
import collections
import copy
import csv
import json
Reported by Pylint.
Line: 42
Column: 1
from keras.utils.generic_utils import Progbar
from keras.utils.io_utils import path_to_string
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
try:
import requests
Reported by Pylint.
Line: 43
Column: 1
from keras.utils.io_utils import path_to_string
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
try:
import requests
except ImportError:
Reported by Pylint.
Line: 44
Column: 1
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
try:
import requests
except ImportError:
requests = None
Reported by Pylint.
Line: 634
Column: 1
"""
def __init__(self):
self.validation_data = None # pylint: disable=g-missing-from-attributes
self.model = None
# Whether this Callback should only run on the chief worker in a
# Multi-Worker setting.
# TODO(omalleyt): Make this attr public once solution is stable.
self._chief_worker_only = None
Reported by Pylint.
Line: 2280
Column: 5
def _configure_embeddings(self):
"""Configure the Projector for embeddings."""
# TODO(omalleyt): Add integration tests.
from google.protobuf import text_format
from keras.layers import embeddings
from keras.protobuf import projector_config_pb2
config = projector_config_pb2.ProjectorConfig()
for layer in self.model.layers:
Reported by Pylint.
Line: 2282
Column: 5
# TODO(omalleyt): Add integration tests.
from google.protobuf import text_format
from keras.layers import embeddings
from keras.protobuf import projector_config_pb2
config = projector_config_pb2.ProjectorConfig()
for layer in self.model.layers:
if isinstance(layer, embeddings.Embedding):
embedding = config.embeddings.add()
Reported by Pylint.
Line: 638
Column: 3
self.model = None
# Whether this Callback should only run on the chief worker in a
# Multi-Worker setting.
# TODO(omalleyt): Make this attr public once solution is stable.
self._chief_worker_only = None
self._supports_tf_logs = False
def set_params(self, params):
self.params = params
Reported by Pylint.
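Note: the flagged lines above in keras/callbacks.py are all import statements that follow the module docstring, with the third-party import tensorflow.compat.v2 as tf placed ahead of the standard-library imports. Column-1 findings of this shape are typically Pylint's import grouping/ordering checks (e.g. wrong-import-order / wrong-import-position). A minimal sketch of the conventional grouping, assuming that is what is being reported here:

"""Callbacks: utilities called at certain points during model training."""
# Standard-library imports first...
import collections
import copy
import csv
import json

# ...then third-party imports.
import tensorflow.compat.v2 as tf

# Optional third-party dependency, guarded so the module still imports
# when the package is unavailable.
try:
  import requests
except ImportError:
  requests = None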
keras/metrics_confusion_matrix_test.py
1333 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras metrics functions."""
import tensorflow.compat.v2 as tf
import json
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import json
from absl.testing import parameterized
import numpy as np
from keras import combinations
from keras import layers
from keras import metrics
from keras import models
Reported by Pylint.
Line: 28
Column: 1
from keras import metrics
from keras import models
from keras.utils import metrics_utils
from tensorflow.python.platform import tf_logging
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
class FalsePositivesTest(tf.test.TestCase, parameterized.TestCase):
Reported by Pylint.
Line: 1480
Column: 1
def test_extra_dims(self):
try:
from scipy import special # pylint: disable=g-import-not-at-top
self.setup()
logits = special.expit(-np.array([[[-10., 10., -10.], [10., -10., 10.]],
[[-12., 12., -12.], [12., -12., 12.]]],
dtype=np.float32))
labels = np.array([[[1, 0, 0], [1, 0, 0]], [[0, 1, 1], [0, 1, 1]]],
Reported by Pylint.
Line: 1201
Column: 5
class AUCTest(tf.test.TestCase, parameterized.TestCase):
def setup(self):
self.num_thresholds = 3
self.y_pred = tf.constant([0, 0.5, 0.3, 0.9], dtype=tf.float32)
epsilon = 1e-12
self.y_pred_logits = -tf.math.log(1.0 / (self.y_pred + epsilon) - 1.0)
self.y_true = tf.constant([0, 0, 1, 1])
self.sample_weight = [1, 2, 3, 4]
Reported by Pylint.
Line: 1202
Column: 5
def setup(self):
self.num_thresholds = 3
self.y_pred = tf.constant([0, 0.5, 0.3, 0.9], dtype=tf.float32)
epsilon = 1e-12
self.y_pred_logits = -tf.math.log(1.0 / (self.y_pred + epsilon) - 1.0)
self.y_true = tf.constant([0, 0, 1, 1])
self.sample_weight = [1, 2, 3, 4]
Reported by Pylint.
Line: 1204
Column: 5
self.num_thresholds = 3
self.y_pred = tf.constant([0, 0.5, 0.3, 0.9], dtype=tf.float32)
epsilon = 1e-12
self.y_pred_logits = -tf.math.log(1.0 / (self.y_pred + epsilon) - 1.0)
self.y_true = tf.constant([0, 0, 1, 1])
self.sample_weight = [1, 2, 3, 4]
# threshold values are [0 - 1e-7, 0.5, 1 + 1e-7]
# y_pred when threshold = 0 - 1e-7 : [1, 1, 1, 1]
Reported by Pylint.
Line: 1205
Column: 5
self.y_pred = tf.constant([0, 0.5, 0.3, 0.9], dtype=tf.float32)
epsilon = 1e-12
self.y_pred_logits = -tf.math.log(1.0 / (self.y_pred + epsilon) - 1.0)
self.y_true = tf.constant([0, 0, 1, 1])
self.sample_weight = [1, 2, 3, 4]
# threshold values are [0 - 1e-7, 0.5, 1 + 1e-7]
# y_pred when threshold = 0 - 1e-7 : [1, 1, 1, 1]
# y_pred when threshold = 0.5 : [0, 0, 0, 1]
Reported by Pylint.
Line: 1206
Column: 5
epsilon = 1e-12
self.y_pred_logits = -tf.math.log(1.0 / (self.y_pred + epsilon) - 1.0)
self.y_true = tf.constant([0, 0, 1, 1])
self.sample_weight = [1, 2, 3, 4]
# threshold values are [0 - 1e-7, 0.5, 1 + 1e-7]
# y_pred when threshold = 0 - 1e-7 : [1, 1, 1, 1]
# y_pred when threshold = 0.5 : [0, 0, 0, 1]
# y_pred when threshold = 1 + 1e-7 : [0, 0, 0, 0]
Reported by Pylint.
Line: 1499
Column: 5
class MultiAUCTest(tf.test.TestCase, parameterized.TestCase):
def setup(self):
self.num_thresholds = 5
self.y_pred = tf.constant(
np.array([[0, 0.5, 0.3, 0.9], [0.1, 0.2, 0.3, 0.4]]).T,
dtype=tf.float32)
epsilon = 1e-12
Reported by Pylint.
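Note: the column-5 findings above in keras/metrics_confusion_matrix_test.py all land on self.* assignments inside a plain setup() helper rather than __init__ or setUp, the usual trigger for Pylint's attribute-defined-outside-init. A minimal sketch of one way to address it, assuming that is the message; the original tests may call setup() deliberately per test (e.g. per graph/eager combination), in which case a targeted # pylint: disable=attribute-defined-outside-init on the helper is the lighter-touch fix. The class below is illustrative, not the full test:

import tensorflow.compat.v2 as tf


class AUCTest(tf.test.TestCase):

  def setUp(self):
    super().setUp()
    # Creating the fixtures in setUp (a recognized initializer) instead of an
    # ad-hoc setup() helper keeps attribute-defined-outside-init quiet.
    self.num_thresholds = 3
    self.y_pred = tf.constant([0, 0.5, 0.3, 0.9], dtype=tf.float32)
    self.y_true = tf.constant([0, 0, 1, 1])
    self.sample_weight = [1, 2, 3, 4]

  def test_fixture_shape(self):
    self.assertEqual(self.y_pred.shape, (4,))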
keras/layers/recurrent.py
1329 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
# pylint: disable=g-classes-have-attributes
"""Recurrent layers and their base classes."""
import tensorflow.compat.v2 as tf
import collections
Reported by Pylint.
Line: 19
Column: 1
# pylint: disable=g-classes-have-attributes
"""Recurrent layers and their base classes."""
import tensorflow.compat.v2 as tf
import collections
import functools
import warnings
Reported by Pylint.
Line: 37
Column: 1
from keras.utils import control_flow_util
from keras.utils import generic_utils
from keras.utils import tf_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
RECURRENT_DROPOUT_WARNING_MSG = (
Reported by Pylint.
Line: 38
Column: 1
from keras.utils import generic_utils
from keras.utils import tf_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
RECURRENT_DROPOUT_WARNING_MSG = (
'RNN `implementation=2` is not supported when `recurrent_dropout` is set. '
Reported by Pylint.
Line: 39
Column: 1
from keras.utils import tf_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
RECURRENT_DROPOUT_WARNING_MSG = (
'RNN `implementation=2` is not supported when `recurrent_dropout` is set. '
'Using `implementation=1`.')
Reported by Pylint.
Line: 190
Column: 1
@classmethod
def from_config(cls, config, custom_objects=None):
from keras.layers import deserialize as deserialize_layer # pylint: disable=g-import-not-at-top
cells = []
for cell_config in config.pop('cells'):
cells.append(
deserialize_layer(cell_config, custom_objects=custom_objects))
return cls(cells, **config)
Reported by Pylint.
Line: 1013
Column: 1
@classmethod
def from_config(cls, config, custom_objects=None):
from keras.layers import deserialize as deserialize_layer # pylint: disable=g-import-not-at-top
cell = deserialize_layer(config.pop('cell'), custom_objects=custom_objects)
num_constants = config.pop('num_constants', 0)
layer = cls(cell, **config)
layer._num_constants = num_constants
return layer
Reported by Pylint.
Line: 121
Column: 3
return tuple(initial_states)
def call(self, inputs, states, constants=None, training=None, **kwargs):
# Recover per-cell states.
state_size = (self.state_size[::-1]
if self.reverse_state_order else self.state_size)
nested_states = tf.nest.pack_sequence_as(state_size, tf.nest.flatten(states))
Reported by Pylint.
Line: 189
Column: 3
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
from keras.layers import deserialize as deserialize_layer # pylint: disable=g-import-not-at-top
cells = []
for cell_config in config.pop('cells'):
cells.append(
deserialize_layer(cell_config, custom_objects=custom_objects))
Reported by Pylint.
Line: 520
Column: 3
else:
return output_shape
def compute_mask(self, inputs, mask):
# Time step masks must be the same for each input.
# This is because the mask for an RNN is of size [batch, time_steps, 1],
# and specifies which time steps should be skipped, and a time step
# must be skipped for all inputs.
# TODO(scottzhu): Should we accept multiple different masks?
Reported by Pylint.
keras/losses_test.py
1324 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras loss functions."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from tensorflow.python.autograph.impl import api as autograph
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
from tensorflow.python.autograph.impl import api as autograph
from keras import activations
from keras import backend
Reported by Pylint.
Line: 22
Column: 1
from absl.testing import parameterized
import numpy as np
from tensorflow.python.autograph.impl import api as autograph
from keras import activations
from keras import backend
from keras import combinations
from keras import losses
from keras.utils import losses_utils
Reported by Pylint.
Line: 711
Column: 5
return np.multiply(x, x_inv_norm)
def setup(self, axis=1):
self.np_y_true = np.asarray([[1, 9, 2], [-5, -2, 6]], dtype=np.float32)
self.np_y_pred = np.asarray([[4, 8, 12], [8, 1, 3]], dtype=np.float32)
y_true = self.l2_norm(self.np_y_true, axis)
y_pred = self.l2_norm(self.np_y_pred, axis)
self.expected_loss = np.sum(np.multiply(y_true, y_pred), axis=(axis,))
Reported by Pylint.
Line: 712
Column: 5
def setup(self, axis=1):
self.np_y_true = np.asarray([[1, 9, 2], [-5, -2, 6]], dtype=np.float32)
self.np_y_pred = np.asarray([[4, 8, 12], [8, 1, 3]], dtype=np.float32)
y_true = self.l2_norm(self.np_y_true, axis)
y_pred = self.l2_norm(self.np_y_pred, axis)
self.expected_loss = np.sum(np.multiply(y_true, y_pred), axis=(axis,))
Reported by Pylint.
Line: 716
Column: 5
y_true = self.l2_norm(self.np_y_true, axis)
y_pred = self.l2_norm(self.np_y_pred, axis)
self.expected_loss = np.sum(np.multiply(y_true, y_pred), axis=(axis,))
self.y_true = tf.constant(self.np_y_true)
self.y_pred = tf.constant(self.np_y_pred)
def test_config(self):
Reported by Pylint.
Line: 718
Column: 5
y_pred = self.l2_norm(self.np_y_pred, axis)
self.expected_loss = np.sum(np.multiply(y_true, y_pred), axis=(axis,))
self.y_true = tf.constant(self.np_y_true)
self.y_pred = tf.constant(self.np_y_pred)
def test_config(self):
cosine_obj = losses.CosineSimilarity(
axis=2, reduction=losses_utils.ReductionV2.SUM, name='cosine_loss')
Reported by Pylint.
Line: 719
Column: 5
self.expected_loss = np.sum(np.multiply(y_true, y_pred), axis=(axis,))
self.y_true = tf.constant(self.np_y_true)
self.y_pred = tf.constant(self.np_y_pred)
def test_config(self):
cosine_obj = losses.CosineSimilarity(
axis=2, reduction=losses_utils.ReductionV2.SUM, name='cosine_loss')
self.assertEqual(cosine_obj.name, 'cosine_loss')
Reported by Pylint.
Line: 1545
Column: 5
y_pred = np.asarray([1, 9, 2, -5, -2, 6]).reshape((2, 3))
y_true = np.asarray([4, 8, 12, 8, 1, 3]).reshape((2, 3))
self.batch_size = 6
error = y_pred - y_true
self.expected_losses = np.log((np.exp(error) + np.exp(-error)) / 2)
self.y_pred = tf.constant(y_pred, dtype=tf.float32)
self.y_true = tf.constant(y_true)
Reported by Pylint.
Line: 1547
Column: 5
self.batch_size = 6
error = y_pred - y_true
self.expected_losses = np.log((np.exp(error) + np.exp(-error)) / 2)
self.y_pred = tf.constant(y_pred, dtype=tf.float32)
self.y_true = tf.constant(y_true)
def test_config(self):
Reported by Pylint.
keras/engine/training_v1.py
1293 issues
Line: 16
Column: 1
# limitations under the License.
# ==============================================================================
"""V1 Training-related part of the Keras engine."""
# pylint: disable=g-classes-have-attributes
import tensorflow.compat.v2 as tf
import collections
import warnings
Reported by Pylint.
Line: 17
Column: 1
# ==============================================================================
"""V1 Training-related part of the Keras engine."""
# pylint: disable=g-classes-have-attributes
import tensorflow.compat.v2 as tf
import collections
import warnings
import numpy as np
Reported by Pylint.
Line: 49
Column: 1
from keras.utils import tf_inspect
from keras.utils import tf_utils
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
try:
from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
except ImportError:
issparse = None
Reported by Pylint.
Line: 52
Column: 1
from tensorflow.python.platform import tf_logging as logging
try:
from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
except ImportError:
issparse = None
class Model(training_lib.Model):
Reported by Pylint.
Line: 1408
Column: 1
return [None for _ in self.output_names]
if target_tensors is not None and not (isinstance(target_tensors, list) and
target_tensors == []): # pylint: disable=g-explicit-bool-comparison
if isinstance(target_tensors, list):
if len(target_tensors) != len(self.outputs):
raise ValueError(
'When passing a list as `target_tensors`, '
'it should have one entry per model output. '
Reported by Pylint.
Line: 57
Column: 1
issparse = None
class Model(training_lib.Model):
"""`Model` groups layers into an object with training and inference features.
There are two ways to instantiate a `Model`:
1 - With the "functional API", where you start from `Input`,
Reported by Pylint.
Line: 57
Column: 1
issparse = None
class Model(training_lib.Model):
"""`Model` groups layers into an object with training and inference features.
There are two ways to instantiate a `Model`:
1 - With the "functional API", where you start from `Input`,
Reported by Pylint.
Line: 163
Column: 3
return base_layer.Layer.get_weights(self)
return base_layer.Layer.get_weights(self)
def load_weights(self, filepath, by_name=False, skip_mismatch=False):
"""Loads all layer weights, either from a TensorFlow or an HDF5 weight file.
If `by_name` is False weights are loaded based on the network's
topology. This means the architecture should be the same as when the weights
were saved. Note that layers that don't have weights are not taken into
Reported by Pylint.
Line: 217
Column: 3
return super(Model, self).load_weights(filepath, by_name, skip_mismatch)
@tf.__internal__.tracking.no_automatic_dependency_tracking
def compile(self,
optimizer='rmsprop',
loss=None,
metrics=None,
loss_weights=None,
sample_weight_mode=None,
Reported by Pylint.
Line: 290
Column: 5
# Prepare Session arguments (legacy).
kwargs.pop('cloning', None) # Legacy DistStrat argument, never used.
self._from_serialized = kwargs.pop('from_serialized', False)
allowed_kwargs = {'feed_dict', 'fetches', 'options', 'run_metadata'}
unknown_kwargs = set(kwargs.keys()) - allowed_kwargs
if unknown_kwargs:
raise TypeError(
'Invalid keyword argument(s) in `compile`: %s' % (unknown_kwargs,))
Reported by Pylint.
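Note: the column-1 findings above around lines 49-52 of keras/engine/training_v1.py sit on the module-level try/except import of scipy.sparse.issparse. The exact message is not shown; imports placed after other module code commonly trip wrong-import-position even when g-import-not-at-top is disabled locally. A minimal sketch of the optional-dependency pattern, with a hypothetical helper (_is_sparse, not part of the original file) showing how the None fallback is meant to be consumed:

try:
  from scipy.sparse import issparse  # pylint: disable=g-import-not-at-top
except ImportError:
  # SciPy is optional; callers must treat `issparse` as possibly None.
  issparse = None


def _is_sparse(x):
  """Returns True if `x` is a SciPy sparse matrix; False when SciPy is absent."""
  return issparse is not None and bool(issparse(x))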
keras/layers/preprocessing/index_lookup_test.py
1217 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras text vectorization preprocessing layer."""
import tensorflow.compat.v2 as tf
import itertools
import os
import random
import string
Reported by Pylint.
Line: 24
Column: 1
import random
import string
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
Reported by Pylint.
Line: 317
Column: 3
if use_dataset:
# Keras APIs expect batched datasets.
# TODO(rachelim): `model.predict` predicts the result on each
# dataset batch separately, then tries to concatenate the results
# together. When the results have different shapes on the non-concat
# axis (which can happen in the output_mode = INT case for
# IndexLookup), the concatenation fails. In real use cases, this may
# not be an issue because users are likely to pipe the preprocessing layer
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import itertools
import os
import random
import string
from absl.testing import parameterized
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import itertools
import os
import random
import string
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import itertools
import os
import random
import string
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 22
Column: 1
import itertools
import os
import random
import string
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 35
Column: 1
from keras.utils.generic_utils import CustomObjectScope
def zip_and_sort(weight_values):
keys, values = weight_values
return sorted(zip(keys, values), key=lambda x: x[1])
def _get_end_to_end_test_cases():
Reported by Pylint.
Line: 36
Column: 1
def zip_and_sort(weight_values):
keys, values = weight_values
return sorted(zip(keys, values), key=lambda x: x[1])
def _get_end_to_end_test_cases():
test_cases = (
Reported by Pylint.
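Note: the findings above at lines 35-36 of keras/layers/preprocessing/index_lookup_test.py point at the module-level helper zip_and_sort (and the _get_end_to_end_test_cases function after it). Column-1 findings on def lines are often missing-function-docstring; assuming that is the message, a one-line docstring is enough to satisfy the check:

def zip_and_sort(weight_values):
  """Zips the (keys, values) pair and sorts the resulting tuples by value."""
  keys, values = weight_values
  return sorted(zip(keys, values), key=lambda x: x[1])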
keras/engine/training.py
1151 issues
Line: 17
Column: 1
# ==============================================================================
"""Training-related part of the Keras engine."""
import tensorflow.compat.v2 as tf
import copy
import itertools
import json
import os
Reported by Pylint.
Line: 25
Column: 1
import os
import warnings
import weakref
from tensorflow.python.eager import context
from keras import backend
from keras import callbacks as callbacks_module
from keras import optimizer_v1
from keras import optimizers
from keras.engine import base_layer
Reported by Pylint.
Line: 52
Column: 1
from keras.utils.io_utils import ask_to_proceed_with_overwrite
from keras.utils.io_utils import path_to_string
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-import-not-at-top
Reported by Pylint.
Line: 53
Column: 1
from keras.utils.io_utils import path_to_string
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-import-not-at-top
try:
Reported by Pylint.
Line: 54
Column: 1
from keras.utils.mode_keys import ModeKeys
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-import-not-at-top
try:
import h5py
Reported by Pylint.
Line: 57
Column: 1
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-import-not-at-top
try:
import h5py
except ImportError:
h5py = None
# pylint: enable=g-import-not-at-top
Reported by Pylint.
Line: 62
Column: 1
import h5py
except ImportError:
h5py = None
# pylint: enable=g-import-not-at-top
@keras_export('keras.Model', 'keras.models.Model')
class Model(base_layer.Layer, version_utils.ModelVersionSelector):
"""`Model` groups layers into an object with training and inference features.
Reported by Pylint.
Line: 152
Column: 1
# Signature detection
if is_functional_model_init_params(args, kwargs) and cls == Model:
# Functional model
from keras.engine import functional # pylint: disable=g-import-not-at-top
return functional.Functional(skip_init=True, *args, **kwargs)
else:
return super(Model, cls).__new__(cls, *args, **kwargs)
@tf.__internal__.tracking.no_automatic_dependency_tracking
Reported by Pylint.
Line: 166
Column: 1
# Special case for Subclassed Functional Model, which we couldn't detect
# when __new__ is called. We only realize it is a functional model when it
# calls super.__init__ with input and output tensor.
from keras.engine import functional # pylint: disable=g-import-not-at-top
if (is_functional_model_init_params(args, kwargs) and
not isinstance(self, functional.Functional)):
# Filter the kwargs for multiple inheritance.
supported_kwargs = ['inputs', 'outputs', 'name', 'trainable', 'skip_init']
model_kwargs = {k: kwargs[k] for k in kwargs if k in supported_kwargs}
Reported by Pylint.
Line: 729
Column: 1
Returns:
Boolean, whether the model should run eagerly.
"""
if self.dynamic and self._run_eagerly is False: # pylint:disable=g-bool-id-comparison
# TODO(fchollet): consider using py_func to enable this.
raise ValueError('Your model contains layers that can only be '
'successfully run in eager execution (layers '
'constructed with `dynamic=True`). '
'You cannot set `run_eagerly=False`.')
Reported by Pylint.
keras/layers/wrappers_test.py
1081 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for layer wrappers."""
import tensorflow.compat.v2 as tf
import copy
from absl.testing import parameterized
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import copy
from absl.testing import parameterized
import numpy as np
import keras
from tensorflow.python.framework import test_util as tf_test_util
from keras import combinations
Reported by Pylint.
Line: 25
Column: 1
import numpy as np
import keras
from tensorflow.python.framework import test_util as tf_test_util
from keras import combinations
from keras import keras_parameterized
from keras import testing_utils
from keras.engine import base_layer_utils
from keras.layers import core
Reported by Pylint.
Line: 33
Column: 1
from keras.layers import core
from keras.layers.rnn_cell_wrapper_v2 import ResidualWrapper
from keras.utils import generic_utils
from tensorflow.python.training.tracking import util as trackable_util
class _RNNCellWithConstants(keras.layers.Layer):
def __init__(self, units, constant_size, **kwargs):
Reported by Pylint.
Line: 1271
Column: 1
ragged_rank=1)
x = tf.cast(x, 'float32')
# pylint: disable=g-long-lambda
with self.cached_session():
if merge_mode == 'ave':
merge_func = lambda y, y_rev: (y + y_rev) / 2
elif merge_mode == 'concat':
merge_func = lambda y, y_rev: tf.concat(
Reported by Pylint.
Line: 1280
Column: 1
(y, y_rev), axis=-1)
elif merge_mode == 'mul':
merge_func = lambda y, y_rev: (y * y_rev)
# pylint: enable=g-long-lambda
inputs = keras.Input(
shape=(None, 3), batch_size=4, dtype='float32', ragged=True)
layer = keras.layers.Bidirectional(
rnn(units, return_sequences=True), merge_mode=merge_mode)
Reported by Pylint.
Line: 45
Column: 5
super(_RNNCellWithConstants, self).__init__(**kwargs)
def build(self, input_shape):
self.input_kernel = self.add_weight(
shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
Reported by Pylint.
Line: 49
Column: 5
shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.constant_kernel = self.add_weight(
shape=(self.constant_size, self.units),
Reported by Pylint.
Line: 53
Column: 5
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.constant_kernel = self.add_weight(
shape=(self.constant_size, self.units),
initializer='uniform',
name='constant_kernel')
self.built = True
Reported by Pylint.
Line: 59
Column: 3
name='constant_kernel')
self.built = True
def call(self, inputs, states, constants):
[prev_output] = states
[constant] = constants
h_input = keras.backend.dot(inputs, self.input_kernel)
h_state = keras.backend.dot(prev_output, self.recurrent_kernel)
h_const = keras.backend.dot(constant, self.constant_kernel)
Reported by Pylint.
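Note: the column-5 findings above at lines 45-53 of keras/layers/wrappers_test.py land on self.*_kernel = self.add_weight(...) assignments inside build(). Creating weights in build() is the normal Keras idiom, but Pylint's attribute-defined-outside-init does not know that; a common way to reconcile the two (assuming that is the message) is to declare the attributes as None in __init__. A trimmed, illustrative sketch of the cell from the excerpt:

import keras


class _RNNCellWithConstants(keras.layers.Layer):
  """Minimal illustrative cell: weights are created lazily in build()."""

  def __init__(self, units, constant_size, **kwargs):
    super().__init__(**kwargs)
    self.units = units
    self.constant_size = constant_size
    # Declared up front so Pylint sees the attributes in __init__; the real
    # values are only known once build() receives the input shape.
    self.input_kernel = None
    self.recurrent_kernel = None
    self.constant_kernel = None

  def build(self, input_shape):
    self.input_kernel = self.add_weight(
        shape=(input_shape[-1], self.units), initializer='uniform',
        name='kernel')
    self.recurrent_kernel = self.add_weight(
        shape=(self.units, self.units), initializer='uniform',
        name='recurrent_kernel')
    self.constant_kernel = self.add_weight(
        shape=(self.constant_size, self.units), initializer='uniform',
        name='constant_kernel')
    self.built = True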
keras/engine/base_layer_v1.py
1078 issues
Line: 18
Column: 1
# pylint: disable=protected-access
"""Contains the base Layer class, from which all layers inherit."""
import tensorflow.compat.v2 as tf
import collections
import functools
import itertools
import threading
Reported by Pylint.
Line: 46
Column: 1
# A module that only depends on `keras.layers` import these from here.
from keras.utils.generic_utils import to_snake_case # pylint: disable=unused-import
from keras.utils.tf_utils import is_tensor_or_tensor_list # pylint: disable=unused-import
from tensorflow.python.platform import tf_logging
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-classes-have-attributes
class Layer(base_layer.Layer):
Reported by Pylint.
Line: 47
Column: 1
from keras.utils.generic_utils import to_snake_case # pylint: disable=unused-import
from keras.utils.tf_utils import is_tensor_or_tensor_list # pylint: disable=unused-import
from tensorflow.python.platform import tf_logging
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-classes-have-attributes
class Layer(base_layer.Layer):
"""Base layer class.
Reported by Pylint.
Line: 50
Column: 1
from tensorflow.tools.docs import doc_controls
# pylint: disable=g-classes-have-attributes
class Layer(base_layer.Layer):
"""Base layer class.
This is the class from which all layers inherit.
Reported by Pylint.
Line: 1606
Column: 9
"""
if not self.built:
if getattr(self, '_is_graph_network', False):
with tf_utils.maybe_init_scope(self):
self._maybe_build(self.inputs)
else:
raise ValueError('You tried to call `count_params` on ' + self.name +
', but the layer isn\'t built. '
'You can build it manually via: `' + self.name +
Reported by Pylint.
Line: 2067
Column: 9
# Any setup work performed only once should happen in an `init_scope`
# to avoid creating symbolic Tensors that will later pollute any eager
# operations.
with tf_utils.maybe_init_scope(self):
self.build(input_shapes)
# We must set also ensure that the layer is marked as built, and the build
# shape is stored since user defined build functions may not be calling
# `super.build()`
Layer.build(self, input_shapes)
Reported by Pylint.
Line: 2208
Column: 1
pass
# Keep track of metric instance created in subclassed layer.
from keras import metrics as metrics_module # pylint: disable=g-import-not-at-top
for val in tf.nest.flatten(value):
if isinstance(val, metrics_module.Metric) and hasattr(self, '_metrics'):
self._metrics.append(val)
# TODO(scottzhu): Need to track Module object as well for weight tracking.
Reported by Pylint.
Line: 2262
Column: 5
def _init_call_fn_args(self, expects_training_arg=None):
# Clear cached call function arguments.
self.__class__._call_full_argspec.fget.cache.pop(self, None)
self.__class__._call_fn_args.fget.cache.pop(self, None)
self.__class__._call_accepts_kwargs.fget.cache.pop(self, None)
call_fn_args = self._call_fn_args
if expects_training_arg is None:
Reported by Pylint.
Line: 2263
Column: 5
def _init_call_fn_args(self, expects_training_arg=None):
# Clear cached call function arguments.
self.__class__._call_full_argspec.fget.cache.pop(self, None)
self.__class__._call_fn_args.fget.cache.pop(self, None)
self.__class__._call_accepts_kwargs.fget.cache.pop(self, None)
call_fn_args = self._call_fn_args
if expects_training_arg is None:
self._expects_training_arg = ('training' in call_fn_args or
Reported by Pylint.
Line: 2264
Column: 5
# Clear cached call function arguments.
self.__class__._call_full_argspec.fget.cache.pop(self, None)
self.__class__._call_fn_args.fget.cache.pop(self, None)
self.__class__._call_accepts_kwargs.fget.cache.pop(self, None)
call_fn_args = self._call_fn_args
if expects_training_arg is None:
self._expects_training_arg = ('training' in call_fn_args or
self._call_accepts_kwargs)
Reported by Pylint.
keras/legacy_tf_layers/convolutional_test.py
1059 issues
Line: 21
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
import numpy as np
from keras.legacy_tf_layers import convolutional as conv_layers
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 27
Column: 1
from keras.legacy_tf_layers import convolutional as conv_layers
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
Reported by Pylint.
Line: 27
Column: 1
from keras.legacy_tf_layers import convolutional as conv_layers
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
Reported by Pylint.
Line: 29
Column: 1
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
Reported by Pylint.
Line: 29
Column: 3
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
Reported by Pylint.
Line: 29
Column: 3
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
Reported by Pylint.
Line: 30
Column: 1
class ConvTest(tf.test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
Reported by Pylint.
Line: 31
Column: 1
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
Reported by Pylint.
Line: 32
Column: 1
def testInvalidDataFormat(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegex(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
images = tf.random.uniform((5, height, width, 3), seed=1)
Reported by Pylint.
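Note: the keras/legacy_tf_layers/convolutional_test.py findings above cluster on the class header and the first lines of testInvalidDataFormat. The messages are not shown; repeated column-1 findings on test definitions often turn out to be missing docstrings (or an indentation-convention mismatch). A hedged sketch that only adds docstrings, assuming the former:

import tensorflow.compat.v2 as tf

from keras.legacy_tf_layers import convolutional as conv_layers


class ConvTest(tf.test.TestCase):
  """Tests for the legacy tf.layers convolution wrappers."""

  def testInvalidDataFormat(self):
    """conv2d should reject an unrecognized data_format string."""
    height, width = 7, 9
    images = tf.random.uniform((5, height, width, 3), seed=1)
    with self.assertRaisesRegex(ValueError, 'data_format'):
      conv_layers.conv2d(images, 32, 3, data_format='invalid')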