The following issues were found:
keras/constraints_test.py
91 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for Keras weights constraints."""
import tensorflow.compat.v2 as tf
import math
import numpy as np
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import math
import numpy as np
from keras import backend
from keras import combinations
Reported by Pylint.
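The message text is not included in this export, but findings at column 1 on these import lines are typically Pylint's import-grouping checks: standard-library modules such as math belong in their own group before third-party packages such as tensorflow and numpy, with first-party keras imports last. A minimal sketch of that conventional grouping, shown only for illustration:

# Standard library.
import math

# Third-party packages.
import numpy as np
import tensorflow.compat.v2 as tf

# First-party (keras) imports.
from keras import backend
from keras import combinations
from keras import constraints

Reordering imports this way normally clears import-order findings without changing runtime behavior.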
Line: 28
Column: 1
from keras import constraints
def get_test_values():
  return [0.1, 0.5, 3, 8, 1e-7]
def get_example_array():
  np.random.seed(3537)
Reported by Pylint.
Line: 29
Column: 1
def get_test_values():
  return [0.1, 0.5, 3, 8, 1e-7]
def get_example_array():
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
Reported by Pylint.
Line: 32
Column: 1
  return [0.1, 0.5, 3, 8, 1e-7]
def get_example_array():
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0. # 0 could possibly cause trouble
  return example_array
Reported by Pylint.
Line: 33
Column: 1
def get_example_array():
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0. # 0 could possibly cause trouble
  return example_array
Reported by Pylint.
Line: 34
Column: 1
def get_example_array():
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0. # 0 could possibly cause trouble
  return example_array
def get_example_kernel(width):
Reported by Pylint.
Line: 35
Column: 1
def get_example_array():
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0. # 0 could possibly cause trouble
  return example_array
def get_example_kernel(width):
  np.random.seed(3537)
Reported by Pylint.
Line: 36
Column: 1
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0. # 0 could possibly cause trouble
  return example_array
def get_example_kernel(width):
  np.random.seed(3537)
  example_array = np.random.rand(width, width, 2, 2)
Reported by Pylint.
Line: 39
Column: 1
  return example_array
def get_example_kernel(width):
  np.random.seed(3537)
  example_array = np.random.rand(width, width, 2, 2)
  return example_array
Reported by Pylint.
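The individual messages are not reproduced here, but top-level helpers flagged at column 1, like the ones above, most often trip documentation and blank-line conventions (for example missing-function-docstring). A possible cleaned-up version of the same helpers, with the docstrings invented purely for illustration and the logic unchanged:

import numpy as np


def get_test_values():
  """Returns representative magnitudes used by the constraint tests."""
  return [0.1, 0.5, 3, 8, 1e-7]


def get_example_array():
  """Returns a deterministic 100x100 array of values in [-50, 50)."""
  np.random.seed(3537)
  example_array = np.random.random((100, 100)) * 100. - 50.
  example_array[0, 0] = 0.  # 0 could possibly cause trouble
  return example_array


def get_example_kernel(width):
  """Returns a deterministic width x width x 2 x 2 example kernel."""
  np.random.seed(3537)
  example_array = np.random.rand(width, width, 2, 2)
  return example_array

Whether this resolves the specific findings depends on which checks were actually raised; the sketch only shows the common docstring and spacing fixes.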
keras/utils/layer_utils_test.py
91 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for layer_utils."""
import tensorflow.compat.v2 as tf
import collections
import contextlib
import multiprocessing.dummy
import pickle
Reported by Pylint.
Line: 105
Column: 20
    # because numpy limits the range of seeds) to ensure that an instance
    # returns the same value in different threads, but different instances
    # return different values.
    return int(np.random.RandomState(id(self) % (2 ** 31)).randint(2 ** 16))
  def get_test_property(self, _):
    """Function provided to .map for threading test."""
    return self.test_property
Reported by Pylint.
Line: 146
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle
    self.assertEqual(id(first_instance), first_instance.my_id)
    # Test that we can pickle and un-pickle
    second_instance = pickle.loads(pickle.dumps(first_instance))
    self.assertEqual(id(second_instance), second_instance.my_id)
    self.assertNotEqual(first_instance.my_id, second_instance.my_id)
    # Make sure de-serialized object uses the cache.
Reported by Bandit.
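Bandit B301 flags pickle.loads because unpickling attacker-controlled bytes can execute arbitrary code. In this test the payload comes from pickle.dumps on an object created a line earlier, so the finding is a false positive in context; a common way to record that decision is an inline nosec marker (a sketch, assuming the project accepts Bandit suppressions; newer Bandit releases also allow naming the specific test after the marker):

import pickle


class _CachedThing(object):
  """Stand-in for the cached-property object under test (illustrative only)."""


first_instance = _CachedThing()
# Safe: the bytes are produced locally by pickle.dumps in the same statement.
# pickle.loads must never be fed data from an untrusted source.
second_instance = pickle.loads(pickle.dumps(first_instance))  # nosec

The alternative is to suppress B301 for the test file in the Bandit configuration instead of annotating individual lines.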
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
import collections
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import collections
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
Reported by Pylint.
Line: 21
Column: 1
import collections
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
import numpy as np
Reported by Pylint.
Line: 22
Column: 1
import collections
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
import numpy as np
Reported by Pylint.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle
import collections
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
import numpy as np
Reported by Bandit.
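B403 is the import-level counterpart of B301: Bandit reports the bare import of pickle so that every later use gets reviewed. When the module's use of pickle is deliberate, as in this serialization round-trip test, the import itself can carry the suppression; a sketch, assuming inline Bandit suppressions are acceptable here:

# Only used to round-trip locally constructed test objects; see the B301 note above.
import pickle  # nosec

Dropping the import in favor of another serializer would also clear the finding, but that would change what the test exercises.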
Line: 23
Column: 1
import contextlib
import multiprocessing.dummy
import pickle
import time
import timeit
import numpy as np
from keras.utils import layer_utils
Reported by Pylint.
Line: 24
Column: 1
import multiprocessing.dummy
import pickle
import time
import timeit
import numpy as np
from keras.utils import layer_utils
Reported by Pylint.
keras/layers/__init__.py
91 issues
Line: 17
Column: 1
# ==============================================================================
"""Keras layers API."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-bad-import-order,g-direct-tensorflow-import,disable=g-import-not-at-top
from tensorflow.python import tf2
# Generic layers.
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-bad-import-order,g-direct-tensorflow-import,disable=g-import-not-at-top
from tensorflow.python import tf2
# Generic layers.
from keras.engine.input_layer import Input
from keras.engine.input_layer import InputLayer
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-bad-import-order,g-direct-tensorflow-import,disable=g-import-not-at-top
from tensorflow.python import tf2
# Generic layers.
from keras.engine.input_layer import Input
from keras.engine.input_layer import InputLayer
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-bad-import-order,g-direct-tensorflow-import,disable=g-import-not-at-top
from tensorflow.python import tf2
# Generic layers.
from keras.engine.input_layer import Input
from keras.engine.input_layer import InputLayer
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
# pylint: disable=g-bad-import-order,g-direct-tensorflow-import,disable=g-import-not-at-top
from tensorflow.python import tf2
# Generic layers.
from keras.engine.input_layer import Input
from keras.engine.input_layer import InputLayer
from keras.engine.input_spec import InputSpec
Reported by Pylint.
Line: 271
Column: 12
    serialization.populate_deserializable_objects()
    if name in serialization.LOCAL.ALL_OBJECTS:
      return serialization.LOCAL.ALL_OBJECTS[name]
    return super(VersionAwareLayers, self).__getattr__(name)
Reported by Pylint.
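The flagged call at column 12 is the two-argument super(VersionAwareLayers, self) form; if this is Pylint's super-with-arguments check (R1725), the zero-argument super() is equivalent on Python 3. A sketch of the method with only that change (the serialization module referenced here is the registry helper the original file already imports; everything else is unchanged):

class VersionAwareLayers(object):
  """Resolves layer names against the serialization registry (sketch)."""

  def __getattr__(self, name):
    serialization.populate_deserializable_objects()
    if name in serialization.LOCAL.ALL_OBJECTS:
      return serialization.LOCAL.ALL_OBJECTS[name]
    # Python 3 style super() behaves the same as super(VersionAwareLayers, self).
    return super().__getattr__(name)

If the report actually raised a different check here, this rewrite is still valid Python 3 but may not silence it.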
Line: 157
Column: 1
from keras.layers.normalization.batch_normalization import SyncBatchNormalization
if tf.__internal__.tf2.enabled():
  from keras.layers.normalization.batch_normalization import BatchNormalization
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization as BatchNormalizationV1
  BatchNormalizationV2 = BatchNormalization
else:
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization
  from keras.layers.normalization.batch_normalization import BatchNormalization as BatchNormalizationV2
Reported by Pylint.
Line: 158
Column: 1
if tf.__internal__.tf2.enabled():
  from keras.layers.normalization.batch_normalization import BatchNormalization
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization as BatchNormalizationV1
  BatchNormalizationV2 = BatchNormalization
else:
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization
  from keras.layers.normalization.batch_normalization import BatchNormalization as BatchNormalizationV2
  BatchNormalizationV1 = BatchNormalization
Reported by Pylint.
Line: 158
Column: 1
if tf.__internal__.tf2.enabled():
  from keras.layers.normalization.batch_normalization import BatchNormalization
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization as BatchNormalizationV1
  BatchNormalizationV2 = BatchNormalization
else:
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization
  from keras.layers.normalization.batch_normalization import BatchNormalization as BatchNormalizationV2
  BatchNormalizationV1 = BatchNormalization
Reported by Pylint.
Line: 159
Column: 1
if tf.__internal__.tf2.enabled():
  from keras.layers.normalization.batch_normalization import BatchNormalization
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization as BatchNormalizationV1
  BatchNormalizationV2 = BatchNormalization
else:
  from keras.layers.normalization.batch_normalization_v1 import BatchNormalization
  from keras.layers.normalization.batch_normalization import BatchNormalization as BatchNormalizationV2
  BatchNormalizationV1 = BatchNormalization
Reported by Pylint.
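Several of the findings on this if/else block are consistent with checks that dislike binding BatchNormalization twice via conditional imports (for example reimported or ungrouped-imports), though the exact messages are not shown. One way such blocks are often restructured is to import each implementation once under a distinct alias and select between them; a sketch using the same module paths as above:

import tensorflow.compat.v2 as tf

from keras.layers.normalization.batch_normalization import (
    BatchNormalization as BatchNormalizationV2)
from keras.layers.normalization.batch_normalization_v1 import (
    BatchNormalization as BatchNormalizationV1)

if tf.__internal__.tf2.enabled():
  BatchNormalization = BatchNormalizationV2
else:
  BatchNormalization = BatchNormalizationV1

Whether this is preferable to a targeted pylint disable comment depends on which checks were raised and on whether the branch-local imports are intentionally lazy.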
keras/utils/dataset_creator_test.py
90 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for dataset_creator."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras import combinations
from keras.distribute import multi_worker_testing_utils
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras import combinations
from keras.distribute import multi_worker_testing_utils
from keras.engine import data_adapter
from keras.engine import sequential
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from keras import combinations
from keras.distribute import multi_worker_testing_utils
from keras.engine import data_adapter
from keras.engine import sequential
from keras.layers import core as core_layers
Reported by Pylint.
Line: 28
Column: 1
from keras.layers import core as core_layers
from keras.optimizer_v2 import gradient_descent
from keras.utils import dataset_creator
from tensorflow.python.training.server_lib import ClusterSpec
class DatasetCreatorTest(tf.test.TestCase, parameterized.TestCase):
  def test_dataset_creator(self):
Reported by Pylint.
Line: 113
Column: 21
    # Ensuring the resulting `DistributedDatasetsFromFunction` has the right
    # options.
    self.assertTrue(data_handler._dataset._options.experimental_fetch_to_device)
    self.assertEqual(
        data_handler._dataset._options.experimental_per_replica_buffer_size, 2)
  def test_dataset_creator_input_options_with_cluster_coordinator(self):
    dataset_fn = lambda _: tf.data.Dataset.from_tensor_slices([1, 1])
Reported by Pylint.
Line: 113
Column: 21
    # Ensuring the resulting `DistributedDatasetsFromFunction` has the right
    # options.
    self.assertTrue(data_handler._dataset._options.experimental_fetch_to_device)
    self.assertEqual(
        data_handler._dataset._options.experimental_per_replica_buffer_size, 2)
  def test_dataset_creator_input_options_with_cluster_coordinator(self):
    dataset_fn = lambda _: tf.data.Dataset.from_tensor_slices([1, 1])
Reported by Pylint.
Line: 115
Column: 9
    # options.
    self.assertTrue(data_handler._dataset._options.experimental_fetch_to_device)
    self.assertEqual(
        data_handler._dataset._options.experimental_per_replica_buffer_size, 2)
  def test_dataset_creator_input_options_with_cluster_coordinator(self):
    dataset_fn = lambda _: tf.data.Dataset.from_tensor_slices([1, 1])
    input_options = tf.distribute.InputOptions(
        experimental_fetch_to_device=True,
Reported by Pylint.
Line: 115
Column: 9
    # options.
    self.assertTrue(data_handler._dataset._options.experimental_fetch_to_device)
    self.assertEqual(
        data_handler._dataset._options.experimental_per_replica_buffer_size, 2)
  def test_dataset_creator_input_options_with_cluster_coordinator(self):
    dataset_fn = lambda _: tf.data.Dataset.from_tensor_slices([1, 1])
    input_options = tf.distribute.InputOptions(
        experimental_fetch_to_device=True,
Reported by Pylint.
Line: 126
Column: 7
    strategy = self._get_parameter_server_strategy()
    with strategy.scope():
      model = sequential.Sequential([core_layers.Dense(10)])
      model._cluster_coordinator = tf.distribute.experimental.coordinator.ClusterCoordinator(
          strategy)
      data_handler = data_adapter.get_data_handler(
          x, steps_per_epoch=2, model=model)
    iter_rv = iter(data_handler._dataset)._values[0]
Reported by Pylint.
Line: 131
Column: 20
      data_handler = data_adapter.get_data_handler(
          x, steps_per_epoch=2, model=model)
    iter_rv = iter(data_handler._dataset)._values[0]
    iter_rv._rebuild_on(model._cluster_coordinator._cluster.workers[0])
    distributed_iterator = iter_rv._get_values()
    # Ensuring the resulting `DistributedIterator` has the right options.
    self.assertTrue(distributed_iterator._options.experimental_fetch_to_device)
Reported by Pylint.
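The findings on data_handler._dataset, iter_rv._rebuild_on, and model._cluster_coordinator look like Pylint's protected-access check (W0212): the test reaches into private attributes to verify that InputOptions are plumbed through. Tests that poke at private state on purpose usually keep the access and scope a suppression to the offending lines rather than relaxing the check globally; a small self-contained illustration of the inline form (the class and attribute are hypothetical):

class _Example(object):

  def __init__(self):
    self._hidden = 42


def read_hidden(example):
  # Deliberate access to a private attribute, suppressed only on this line.
  return example._hidden  # pylint: disable=protected-access

In the test above, the same comment style can be attached to each flagged line, or a file-level disable can be placed at the top of the test module.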
keras/legacy_tf_layers/convolutional.py
90 issues
Line: 15
Column: 1
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=g-classes-have-attributes
"""Contains the convolutional layer classes and their functional aliases."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
Reported by Pylint.
Line: 21
Column: 1
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
import warnings
from keras import layers as keras_layers
from keras.legacy_tf_layers import base
Reported by Pylint.
Line: 27
Column: 1
from keras import layers as keras_layers
from keras.legacy_tf_layers import base
from tensorflow.python.util.tf_export import keras_export
from tensorflow.python.util.tf_export import tf_export
@keras_export(v1=['keras.__internal__.legacy.layers.Conv1D'])
@tf_export(v1=['layers.Conv1D'])
Reported by Pylint.
Line: 28
Column: 1
from keras import layers as keras_layers
from keras.legacy_tf_layers import base
from tensorflow.python.util.tf_export import keras_export
from tensorflow.python.util.tf_export import tf_export
@keras_export(v1=['keras.__internal__.legacy.layers.Conv1D'])
@tf_export(v1=['layers.Conv1D'])
class Conv1D(keras_layers.Conv1D, base.Layer):
Reported by Pylint.
Line: 1
Column: 1
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
Reported by Pylint.
Line: 23
Column: 1
import tensorflow.compat.v2 as tf
import warnings
from keras import layers as keras_layers
from keras.legacy_tf_layers import base
from tensorflow.python.util.tf_export import keras_export
from tensorflow.python.util.tf_export import tf_export
Reported by Pylint.
Line: 34
Column: 1
@keras_export(v1=['keras.__internal__.legacy.layers.Conv1D'])
@tf_export(v1=['layers.Conv1D'])
class Conv1D(keras_layers.Conv1D, base.Layer):
  """1D convolution layer (e.g. temporal convolution).
  This layer creates a convolution kernel that is convolved
  (actually cross-correlated) with the layer input to produce a tensor of
  outputs. If `use_bias` is True (and a `bias_initializer` is provided),
  a bias vector is created and added to the outputs. Finally, if
Reported by Pylint.
Line: 112
Column: 1
  @end_compatibility
  """
  def __init__(self, filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format='channels_last',
               dilation_rate=1,
Reported by Pylint.
Line: 112
Column: 3
  @end_compatibility
  """
  def __init__(self, filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format='channels_last',
               dilation_rate=1,
Reported by Pylint.
Line: 112
Column: 3
  @end_compatibility
  """
  def __init__(self, filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format='channels_last',
               dilation_rate=1,
Reported by Pylint.
keras/layers/preprocessing/benchmarks/feature_column_benchmark.py
90 issues
Line: 17
Column: 1
# ==============================================================================
"""Benchmark suite for KPL and feature column implementations."""
import tensorflow as tf
import itertools
import math
import random
import string
import time
Reported by Pylint.
Line: 26
Column: 1
import numpy as np
import keras
# This is required as of 3/2021 because otherwise we drop into graph mode.
tf.compat.v1.enable_v2_behavior()
Reported by Pylint.
Line: 61
Column: 5
    self.steps += 1
  def on_predict_end(self, _):
    self.tn = time.time()
    self.t_avg = (self.tn - self.t0) / self.steps
def create_data(length, num_entries, max_value, dtype):
  """Create a ragged tensor with random data entries."""
Reported by Pylint.
Line: 62
Column: 5
  def on_predict_end(self, _):
    self.tn = time.time()
    self.t_avg = (self.tn - self.t0) / self.steps
def create_data(length, num_entries, max_value, dtype):
  """Create a ragged tensor with random data entries."""
  lengths = (np.random.random(size=num_entries) * length).astype(int)
Reported by Pylint.
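The two findings at column 5 sit on the self.tn and self.t_avg assignments inside on_predict_end, which is the pattern Pylint's attribute-defined-outside-init check (W0201) reports. If that is the check involved, the usual fix is to declare the timing attributes in __init__ so the callbacks only update them; a sketch (the class name and the on_predict_begin hook are assumptions, only the hooks visible above are taken from the report):

import time

import keras


class TimingCallback(keras.callbacks.Callback):
  """Records average per-step predict time (illustrative sketch)."""

  def __init__(self):
    super().__init__()
    self.t0 = None
    self.tn = None
    self.t_avg = None
    self.steps = 0

  def on_predict_begin(self, logs=None):
    self.t0 = time.time()

  def on_predict_batch_end(self, batch, logs=None):
    self.steps += 1

  def on_predict_end(self, logs=None):
    self.tn = time.time()
    self.t_avg = (self.tn - self.t0) / self.steps

Initializing the attributes up front does not change the measured timings; it only makes the object's attribute set explicit.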
Line: 18
Column: 1
"""Benchmark suite for KPL and feature column implementations."""
import tensorflow as tf
import itertools
import math
import random
import string
import time
Reported by Pylint.
Line: 19
Column: 1
import tensorflow as tf
import itertools
import math
import random
import string
import time
import numpy as np
Reported by Pylint.
Line: 20
Column: 1
import tensorflow as tf
import itertools
import math
import random
import string
import time
import numpy as np
Reported by Pylint.
Line: 21
Column: 1
import itertools
import math
import random
import string
import time
import numpy as np
import keras
Reported by Pylint.
Line: 22
Column: 1
import math
import random
import string
import time
import numpy as np
import keras
Reported by Pylint.
Line: 32
Column: 1
tf.compat.v1.enable_v2_behavior()
class LayerBenchmark(tf.test.Benchmark):
  """Benchmark the layer forward pass."""
  def report(self, name, keras_time, fc_time, iters):
    """Calculate and report benchmark statistics."""
    extras = {
Reported by Pylint.
keras/layers/preprocessing/preprocessing_stage.py
90 issues
Line: 17
Column: 1
# ==============================================================================
"""Preprocessing stage."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras.engine import base_preprocessing_layer
from keras.engine import functional
Reported by Pylint.
Line: 18
Column: 1
"""Preprocessing stage."""
import tensorflow.compat.v2 as tf
# pylint: disable=g-classes-have-attributes
import numpy as np
from keras.engine import base_preprocessing_layer
from keras.engine import functional
from keras.engine import sequential
Reported by Pylint.
Line: 28
Column: 1
# Sequential methods should take precedence.
class PreprocessingStage(sequential.Sequential,
                         base_preprocessing_layer.PreprocessingLayer):
  """A sequential preprocessing stage.
  This preprocessing stage wraps a list of preprocessing layers into a
  Sequential-like object that enables you to `adapt()` the whole list via
Reported by Pylint.
Line: 28
Column: 1
# Sequential methods should take precedence.
class PreprocessingStage(sequential.Sequential,
                         base_preprocessing_layer.PreprocessingLayer):
  """A sequential preprocessing stage.
  This preprocessing stage wraps a list of preprocessing layers into a
  Sequential-like object that enables you to `adapt()` the whole list via
Reported by Pylint.
Line: 41
Column: 3
    name: String. Optional name for the preprocessing stage object.
  """
  def adapt(self, data, reset_state=True):
    """Adapt the state of the layers of the preprocessing stage to the data.
    Args:
      data: A batched Dataset object, or a NumPy array, or an EagerTensor.
        Data to be iterated over to adapt the state of the layers in this
Reported by Pylint.
Line: 95
Column: 1
# Functional methods should take precedence.
class FunctionalPreprocessingStage(functional.Functional,
                                   base_preprocessing_layer.PreprocessingLayer):
  """A functional preprocessing stage.
  This preprocessing stage wraps a graph of preprocessing layers into a
  Functional-like object that enables you to `adapt()` the whole graph via
Reported by Pylint.
Line: 95
Column: 1
# Functional methods should take precedence.
class FunctionalPreprocessingStage(functional.Functional,
                                   base_preprocessing_layer.PreprocessingLayer):
  """A functional preprocessing stage.
  This preprocessing stage wraps a graph of preprocessing layers into a
  Functional-like object that enables you to `adapt()` the whole graph via
Reported by Pylint.
Line: 138
Column: 3
        '`fit`. Instead, you may feed data to `adapt` the stage to set '
        'appropriate states of the layers in the stage.')
  def adapt(self, data, reset_state=True):
    """Adapt the state of the layers of the preprocessing stage to the data.
    Args:
      data: A batched Dataset object, a NumPy array, an EagerTensor, or a list,
        dict or nested structure of Numpy Arrays or EagerTensors. The elements
Reported by Pylint.
Line: 28
Column: 1
# Sequential methods should take precedence.
class PreprocessingStage(sequential.Sequential,
                         base_preprocessing_layer.PreprocessingLayer):
  """A sequential preprocessing stage.
  This preprocessing stage wraps a list of preprocessing layers into a
  Sequential-like object that enables you to `adapt()` the whole list via
Reported by Pylint.
Line: 30
Column: 1
# Sequential methods should take precedence.
class PreprocessingStage(sequential.Sequential,
                         base_preprocessing_layer.PreprocessingLayer):
  """A sequential preprocessing stage.
  This preprocessing stage wraps a list of preprocessing layers into a
  Sequential-like object that enables you to `adapt()` the whole list via
  a single `adapt()` call on the preprocessing stage.
Reported by Pylint.
keras/engine/correctness_test.py
89 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for numerical correctness."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
Reported by Pylint.
Line: 27
Column: 1
from keras import testing_utils
class MultiInputSubclassed(keras.Model):
  """Subclassed Model that adds its inputs and then adds a bias."""
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
Reported by Pylint.
Line: 35
Column: 3
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
    added = self.add(inputs)
    return self.bias(added)
def multi_input_functional():
Reported by Pylint.
Line: 28
Column: 1
class MultiInputSubclassed(keras.Model):
  """Subclassed Model that adds its inputs and then adds a bias."""
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
Reported by Pylint.
Line: 30
Column: 1
class MultiInputSubclassed(keras.Model):
  """Subclassed Model that adds its inputs and then adds a bias."""
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
Reported by Pylint.
Line: 31
Column: 5
  """Subclassed Model that adds its inputs and then adds a bias."""
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
    added = self.add(inputs)
Reported by Pylint.
Line: 31
Column: 1
  """Subclassed Model that adds its inputs and then adds a bias."""
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
    added = self.add(inputs)
Reported by Pylint.
Line: 32
Column: 1
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
    added = self.add(inputs)
    return self.bias(added)
Reported by Pylint.
Line: 33
Column: 1
  def __init__(self):
    super(MultiInputSubclassed, self).__init__()
    self.add = keras.layers.Add()
    self.bias = testing_utils.Bias()
  def call(self, inputs):
    added = self.add(inputs)
    return self.bias(added)
Reported by Pylint.
keras/layers/convolutional_transpose_test.py
88 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for convolutional transpose layers."""
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
Reported by Pylint.
Line: 19
Column: 1
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import numpy as np
import keras
from keras import keras_parameterized
from keras import testing_utils
Reported by Pylint.
Line: 47
Column: 3
      ('padding_same', {'padding': 'same'}),
      ('strides', {'strides': (2, 2)}),
      # Only runs on GPU with CUDA, channels_first is not supported on CPU.
      # TODO(b/62340061): Support channels_first on CPU.
      ('data_format', {'data_format': 'channels_first'}),
      ('strides_output_padding', {'strides': (2, 2), 'output_padding': (1, 1)}),
  )
  def test_conv2d_transpose(self, kwargs):
    kwargs['filters'] = 2
Reported by Pylint.
Line: 141
Column: 3
      ('padding_same', {'padding': 'same'}),
      ('strides', {'strides': (2, 2, 2)}),
      # Only runs on GPU with CUDA, channels_first is not supported on CPU.
      # TODO(b/62340061): Support channels_first on CPU.
      ('data_format', {'data_format': 'channels_first'}),
      ('strides_output_padding', {'strides': (2, 2, 2),
                                  'output_padding': (1, 1, 1)}),
  )
  def test_conv3d_transpose(self, kwargs):
Reported by Pylint.
Line: 28
Column: 1
@keras_parameterized.run_all_keras_modes
class Conv2DTransposeTest(keras_parameterized.TestCase):
  def _run_test(self, kwargs):
    num_samples = 2
    stack_size = 3
    num_row = 7
Reported by Pylint.
Line: 30
Column: 1
@keras_parameterized.run_all_keras_modes
class Conv2DTransposeTest(keras_parameterized.TestCase):
  def _run_test(self, kwargs):
    num_samples = 2
    stack_size = 3
    num_row = 7
    num_col = 6
Reported by Pylint.
Line: 31
Column: 1
class Conv2DTransposeTest(keras_parameterized.TestCase):
  def _run_test(self, kwargs):
    num_samples = 2
    stack_size = 3
    num_row = 7
    num_col = 6
    with self.cached_session():
Reported by Pylint.
Line: 32
Column: 1
  def _run_test(self, kwargs):
    num_samples = 2
    stack_size = 3
    num_row = 7
    num_col = 6
    with self.cached_session():
      testing_utils.layer_test(
Reported by Pylint.
Line: 33
Column: 1
  def _run_test(self, kwargs):
    num_samples = 2
    stack_size = 3
    num_row = 7
    num_col = 6
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Conv2DTranspose,
Reported by Pylint.
Line: 34
Column: 1
    num_samples = 2
    stack_size = 3
    num_row = 7
    num_col = 6
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Conv2DTranspose,
          kwargs=kwargs,
Reported by Pylint.
keras/tests/graph_util_test.py
88 issues
Line: 17
Column: 1
# ==============================================================================
"""Tests for tensorflow.python.client.graph_util."""
import tensorflow.compat.v2 as tf
import numpy as np
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
Reported by Pylint.
Line: 20
Column: 1
import tensorflow.compat.v2 as tf
import numpy as np
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph
Reported by Pylint.
Line: 21
Column: 1
import numpy as np
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph
class ConvertVariablesToConstantsTest(tf.test.TestCase):
Reported by Pylint.
Line: 22
Column: 1
import numpy as np
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph
class ConvertVariablesToConstantsTest(tf.test.TestCase):
Reported by Pylint.
Line: 23
Column: 1
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph
class ConvertVariablesToConstantsTest(tf.test.TestCase):
  def _get_tensors(self, sess, tensor_list):
Reported by Pylint.
Line: 22
Column: 1
import numpy as np
from tensorflow.core.protobuf import meta_graph_pb2
import keras
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph
class ConvertVariablesToConstantsTest(tf.test.TestCase):
Reported by Pylint.
Line: 26
Column: 1
from tensorflow.python.training.saver import export_meta_graph
class ConvertVariablesToConstantsTest(tf.test.TestCase):
  def _get_tensors(self, sess, tensor_list):
    """Returns a list of Tensor objects from the Session."""
    return [
        sess.graph.get_tensor_by_name(tensor.name) for tensor in tensor_list
Reported by Pylint.
Line: 28
Column: 1
class ConvertVariablesToConstantsTest(tf.test.TestCase):
  def _get_tensors(self, sess, tensor_list):
    """Returns a list of Tensor objects from the Session."""
    return [
        sess.graph.get_tensor_by_name(tensor.name) for tensor in tensor_list
    ]
Reported by Pylint.
Line: 28
Column: 3
class ConvertVariablesToConstantsTest(tf.test.TestCase):
  def _get_tensors(self, sess, tensor_list):
    """Returns a list of Tensor objects from the Session."""
    return [
        sess.graph.get_tensor_by_name(tensor.name) for tensor in tensor_list
    ]
Reported by Pylint.
Line: 29
Column: 1
class ConvertVariablesToConstantsTest(tf.test.TestCase):
  def _get_tensors(self, sess, tensor_list):
    """Returns a list of Tensor objects from the Session."""
    return [
        sess.graph.get_tensor_by_name(tensor.name) for tensor in tensor_list
    ]
  def _get_tensor_names(self, tensors):
Reported by Pylint.