The following issues were found:
test/jit/test_script_profile.py
32 issues
Line: 4
Column: 1
import os
import sys
import torch
from torch import nn
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
Reported by Pylint.
Line: 5
Column: 1
import sys
import torch
from torch import nn
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
Reported by Pylint.
Line: 10
Column: 1
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase
if __name__ == '__main__':
raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
"\tpython test/test_jit.py TESTNAME\n\n"
"instead.")
Reported by Pylint.
Line: 24
Column: 23
self.lstm2 = nn.LSTMCell(51, 51)
self.linear = nn.Linear(51, 1)
def forward(self, input):
outputs = []
h_t = torch.zeros(input.size(0), 51)
c_t = torch.zeros(input.size(0), 51)
h_t2 = torch.zeros(input.size(0), 51)
c_t2 = torch.zeros(input.size(0), 51)
Reported by Pylint.
Line: 43
Column: 13
def test_basic(self):
seq = torch.jit.script(Sequence())
p = torch.jit._ScriptProfile()
p.enable()
seq(torch.rand((10, 100)))
p.disable()
self.assertNotEqual(p.dump_string(), "")
Reported by Pylint.
Line: 54
Column: 17
@torch.jit.script
def fn():
p = torch.jit._ScriptProfile()
p.enable()
_ = seq(torch.rand((10, 100)))
p.disable()
return p
Reported by Pylint.
Line: 64
Column: 21
def test_multi(self):
seq = torch.jit.script(Sequence())
profiles = [torch.jit._ScriptProfile() for _ in range(5)]
for p in profiles:
p.enable()
last = None
while len(profiles) > 0:
Reported by Pylint.
Line: 84
Column: 17
@torch.jit.script
def fn():
p = torch.jit._ScriptProfile()
p.enable()
_ = seq(torch.rand((10, 100)))
p.disable()
stats0 = p.dump_string()
Reported by Pylint.
Line: 106
Column: 13
self.assertNotEqual(s1, s2)
def test_empty(self):
p = torch.jit._ScriptProfile()
p.enable()
p.disable()
self.assertEqual(p.dump_string(), "")
Reported by Pylint.
Line: 1
Column: 1
import os
import sys
import torch
from torch import nn
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
Reported by Pylint.
caffe2/contrib/fakelowp/test/test_deq_swish_quant_nnpi.py
31 issues
Line: 2
Column: 1
import numpy as np
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
Reported by Pylint.
Line: 3
Column: 1
import numpy as np
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
Reported by Pylint.
Line: 4
Column: 1
import numpy as np
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
Reported by Pylint.
Line: 5
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
Reported by Pylint.
Line: 6
Column: 1
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
Reported by Pylint.
Line: 8
Column: 1
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
class DeqSwishQuantTest(serial.SerializedTestCase):
def _get_scale_zp(self, tensor):
Reported by Pylint.
Line: 2
Column: 1
import numpy as np
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
Reported by Pylint.
Line: 130
Column: 3
1 if o.type == "Onnxifi" else 0 for o in net_onnxified.op
)
np.testing.assert_equal(num_onnxified_ops, 1)
# TODO: add an assertion to check the optimized net
# fused Dequantize->Swish->Quantize to QuantizedSwish
workspace.CreateNet(net_onnxified)
workspace.RunNet(net_onnxified.name)
Y_glow = workspace.FetchInt8Blob("Y")
U_int8 = workspace.FetchInt8Blob("U_int8")
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
Reported by Pylint.
Line: 7
Column: 1
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
from hypothesis import settings
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
class DeqSwishQuantTest(serial.SerializedTestCase):
Reported by Pylint.
caffe2/contrib/fakelowp/test/test_chunking.py
31 issues
Line: 2
Column: 1
# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
Reported by Pylint.
Line: 5
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 6
Column: 1
import datetime
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 7
Column: 1
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
# Test that parallel chunks behave the same way as the serial one
Reported by Pylint.
Line: 8
Column: 1
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
# Test that parallel chunks behave the same way as the serial one
Reported by Pylint.
Line: 9
Column: 1
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
# Test that parallel chunks behave the same way as the serial one
workspace.GlobalInit(
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
# Test that parallel chunks behave the same way as the serial one
workspace.GlobalInit(
[
Reported by Pylint.
Line: 2
Column: 1
# Must happen before importing caffe2.python.*
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
Reported by Pylint.
Line: 5
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
import datetime
import numpy as np
from hypothesis import given, settings, example
from hypothesis import strategies as st
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 47
Column: 40
)
# @example(m=64, k=5423, n=553, scale=1e-3, zp=120, rand_seed=1)
@settings(deadline=datetime.timedelta(seconds=1000), max_examples=1)
def test_ParallelFC(self, m, k, n, scale, zp, rand_seed):
np.random.seed(rand_seed)
workspace.ResetWorkspace()
# Y = W_T * X + b
X_fp32 = np.random.uniform(-1, 1, size=(m, k)).astype(np.float16) \
Reported by Pylint.
caffe2/contrib/fakelowp/test/test_batchnorm_nnpi_fp16.py
31 issues
Line: 4
Column: 1
import numpy as np
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
Reported by Pylint.
Line: 5
Column: 1
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
Reported by Pylint.
Line: 6
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
Reported by Pylint.
Line: 7
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 8
Column: 1
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
Reported by Pylint.
Line: 9
Column: 1
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
Reported by Pylint.
Line: 10
Column: 1
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
core.GlobalInit(["caffe2", "--glow_global_fp16=1",
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python import core
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
core.GlobalInit(["caffe2", "--glow_global_fp16=1",
"--glow_global_fused_scale_offset_fp16=1",
Reported by Pylint.
Line: 12
Column: 1
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import caffe2.python.serialized_test.serialized_test_util as serial
import datetime
core.GlobalInit(["caffe2", "--glow_global_fp16=1",
"--glow_global_fused_scale_offset_fp16=1",
"--glow_global_force_sls_fp16_accum=1"])
Reported by Pylint.
Line: 2
Column: 1
import numpy as np
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from hypothesis import given, settings
from hypothesis import strategies as st
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import workspace
Reported by Pylint.
caffe2/contrib/fakelowp/test/test_batchmatmul_nnpi_fp16.py
31 issues
Line: 3
Column: 1
import numpy as np
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
Reported by Pylint.
Line: 5
Column: 1
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
Reported by Pylint.
Line: 6
Column: 1
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
Reported by Pylint.
Line: 7
Column: 1
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 8
Column: 1
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
import caffe2.python.serialized_test.serialized_test_util as serial
Reported by Pylint.
Line: 10
Column: 1
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
import caffe2.python.serialized_test.serialized_test_util as serial
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
Reported by Pylint.
Line: 11
Column: 1
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
import caffe2.python.serialized_test.serialized_test_util as serial
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
Reported by Pylint.
Line: 12
Column: 1
import datetime
from hypothesis import given, settings
import hypothesis.strategies as st
import caffe2.python.serialized_test.serialized_test_util as serial
core.GlobalInit(["caffe2", "--caffe2_log_level=-3", "--glow_global_fp16=1"])
class TestBatchMatMul(serial.SerializedTestCase):
Reported by Pylint.
Line: 3
Column: 1
import numpy as np
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import unittest
import caffe2.python.fakelowp.init_shared_libs # noqa
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net
from caffe2.python.fakelowp.test_utils import print_test_debug_info
import datetime
Reported by Pylint.
scripts/release_notes/commitlist.py
31 issues
Line: 11
Column: 1
import re
"""
Example Usages
Create a new commitlist for consumption by categorize.py.
Said commitlist contains commits between v1.5.0 and f5bc91f851.
Reported by Pylint.
Line: 1
Column: 1
import argparse
from common import run, topics
from collections import defaultdict
import os
import csv
import pprint
from common import CommitDataCache
import re
Reported by Pylint.
Line: 3
Column: 1
import argparse
from common import run, topics
from collections import defaultdict
import os
import csv
import pprint
from common import CommitDataCache
import re
Reported by Pylint.
Line: 4
Column: 1
import argparse
from common import run, topics
from collections import defaultdict
import os
import csv
import pprint
from common import CommitDataCache
import re
Reported by Pylint.
Line: 5
Column: 1
from common import run, topics
from collections import defaultdict
import os
import csv
import pprint
from common import CommitDataCache
import re
Reported by Pylint.
Line: 6
Column: 1
from collections import defaultdict
import os
import csv
import pprint
from common import CommitDataCache
import re
"""
Reported by Pylint.
Line: 8
Column: 1
import csv
import pprint
from common import CommitDataCache
import re
"""
Example Usages
Reported by Pylint.
Line: 25
Column: 1
"""
class Commit:
def __init__(self, commit_hash, category, topic, title):
self.commit_hash = commit_hash
self.category = category
self.topic = topic
self.title = title
Reported by Pylint.
Line: 43
Column: 1
def __repr__(self):
return f'Commit({self.commit_hash}, {self.category}, {self.topic}, {self.title})'
class CommitList:
# NB: Private ctor. Use `from_existing` or `create_new`.
def __init__(self, path, commits):
self.path = path
self.commits = commits
Reported by Pylint.
Line: 50
Column: 5
self.commits = commits
@staticmethod
def from_existing(path):
commits = CommitList.read_from_disk(path)
return CommitList(path, commits)
@staticmethod
def create_new(path, base_version, new_version):
Reported by Pylint.
torch/distributions/lkj_cholesky.py
31 issues
Line: 61
Column: 23
self.dim = dim
self.concentration, = broadcast_all(concentration)
batch_shape = self.concentration.size()
event_shape = torch.Size((dim, dim))
# This is used to draw vectorized samples from the beta distribution in Sec. 3.2 of [1].
marginal_conc = self.concentration + 0.5 * (self.dim - 2)
offset = torch.arange(self.dim - 1, dtype=self.concentration.dtype, device=self.concentration.device)
offset = torch.cat([offset.new_zeros((1,)), offset])
beta_conc1 = offset + 0.5
Reported by Pylint.
Line: 64
Column: 18
event_shape = torch.Size((dim, dim))
# This is used to draw vectorized samples from the beta distribution in Sec. 3.2 of [1].
marginal_conc = self.concentration + 0.5 * (self.dim - 2)
offset = torch.arange(self.dim - 1, dtype=self.concentration.dtype, device=self.concentration.device)
offset = torch.cat([offset.new_zeros((1,)), offset])
beta_conc1 = offset + 0.5
beta_conc0 = marginal_conc.unsqueeze(-1) - 0.5 * offset
self._beta = Beta(beta_conc1, beta_conc0)
super(LKJCholesky, self).__init__(batch_shape, event_shape, validate_args)
Reported by Pylint.
Line: 65
Column: 18
# This is used to draw vectorized samples from the beta distribution in Sec. 3.2 of [1].
marginal_conc = self.concentration + 0.5 * (self.dim - 2)
offset = torch.arange(self.dim - 1, dtype=self.concentration.dtype, device=self.concentration.device)
offset = torch.cat([offset.new_zeros((1,)), offset])
beta_conc1 = offset + 0.5
beta_conc0 = marginal_conc.unsqueeze(-1) - 0.5 * offset
self._beta = Beta(beta_conc1, beta_conc0)
super(LKJCholesky, self).__init__(batch_shape, event_shape, validate_args)
Reported by Pylint.
Line: 73
Column: 23
def expand(self, batch_shape, _instance=None):
new = self._get_checked_instance(LKJCholesky, _instance)
batch_shape = torch.Size(batch_shape)
new.dim = self.dim
new.concentration = self.concentration.expand(batch_shape)
new._beta = self._beta.expand(batch_shape + (self.dim,))
super(LKJCholesky, new).__init__(batch_shape, self.event_shape, validate_args=False)
new._validate_args = self._validate_args
Reported by Pylint.
Line: 81
Column: 35
new._validate_args = self._validate_args
return new
def sample(self, sample_shape=torch.Size()):
# This uses the Onion method, but there are a few differences from [1] Sec. 3.2:
# - This vectorizes the for loop and also works for heterogeneous eta.
# - Same algorithm generalizes to n=1.
# - The procedure is simplified since we are sampling the cholesky factor of
# the correlation matrix instead of the correlation matrix itself. As such,
Reported by Pylint.
Line: 89
Column: 20
# the correlation matrix instead of the correlation matrix itself. As such,
# we only need to generate `w`.
y = self._beta.sample(sample_shape).unsqueeze(-1)
u_normal = torch.randn(self._extended_shape(sample_shape),
dtype=y.dtype,
device=y.device).tril(-1)
u_hypersphere = u_normal / u_normal.norm(dim=-1, keepdim=True)
# Replace NaNs in first row
u_hypersphere[..., 0, :].fill_(0.)
Reported by Pylint.
Line: 95
Column: 13
u_hypersphere = u_normal / u_normal.norm(dim=-1, keepdim=True)
# Replace NaNs in first row
u_hypersphere[..., 0, :].fill_(0.)
w = torch.sqrt(y) * u_hypersphere
# Fill diagonal elements; clamp for numerical stability
eps = torch.finfo(w.dtype).tiny
diag_elems = torch.clamp(1 - torch.sum(w**2, dim=-1), min=eps).sqrt()
w += torch.diag_embed(diag_elems)
return w
Reported by Pylint.
Line: 97
Column: 15
u_hypersphere[..., 0, :].fill_(0.)
w = torch.sqrt(y) * u_hypersphere
# Fill diagonal elements; clamp for numerical stability
eps = torch.finfo(w.dtype).tiny
diag_elems = torch.clamp(1 - torch.sum(w**2, dim=-1), min=eps).sqrt()
w += torch.diag_embed(diag_elems)
return w
def log_prob(self, value):
Reported by Pylint.
Line: 98
Column: 22
w = torch.sqrt(y) * u_hypersphere
# Fill diagonal elements; clamp for numerical stability
eps = torch.finfo(w.dtype).tiny
diag_elems = torch.clamp(1 - torch.sum(w**2, dim=-1), min=eps).sqrt()
w += torch.diag_embed(diag_elems)
return w
def log_prob(self, value):
# See: https://mc-stan.org/docs/2_25/functions-reference/cholesky-lkj-correlation-distribution.html
Reported by Pylint.
Line: 98
Column: 38
w = torch.sqrt(y) * u_hypersphere
# Fill diagonal elements; clamp for numerical stability
eps = torch.finfo(w.dtype).tiny
diag_elems = torch.clamp(1 - torch.sum(w**2, dim=-1), min=eps).sqrt()
w += torch.diag_embed(diag_elems)
return w
def log_prob(self, value):
# See: https://mc-stan.org/docs/2_25/functions-reference/cholesky-lkj-correlation-distribution.html
Reported by Pylint.
caffe2/quantization/server/elementwise_mul_dnnlowp_op_test.py
31 issues
Line: 6
Column: 1
import collections
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close
from hypothesis import given, settings
Reported by Pylint.
Line: 10
Column: 1
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close
from hypothesis import given, settings
dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops")
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
Reported by Pylint.
Line: 28
Column: 71
)
@settings(deadline=None)
def test_dnnlowp_elementwise_mul_int(
self, N, is_empty, in_quantized, out_quantized, in_place, gc, dc
):
if is_empty:
N = 0
# FIXME: DNNLOWP Mul doesn't support inplace operation and
# dequantize_output=1 at the same time
Reported by Pylint.
Line: 32
Column: 3
):
if is_empty:
N = 0
# FIXME: DNNLOWP Mul doesn't support inplace operation and
# dequantize_output=1 at the same time
if in_place[0] or in_place[1]:
in_quantized = True
out_quantized = True
Reported by Pylint.
Line: 104
Column: 58
@given(**hu.gcs_cpu_only)
@settings(deadline=None)
def test_dnnlowp_elementwise_mul_broadcast(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
min_ = -100
max_ = min_ + 255
A = np.round(np.random.rand(2, 3, 4, 5) * (max_ - min_) + min_)
A = A.astype(np.float32)
Reported by Pylint.
Line: 144
Column: 63
@given(**hu.gcs_cpu_only)
@settings(deadline=None)
def test_dnnlowp_elementwise_mul_broadcast_axis(self, gc, dc):
for bdim, axis in [
((3, 4), 1), # broadcasting intermediate dimensions
((2,), 0), # broadcasting the first dimension
((1, 4, 1), 1),
]:
Reported by Pylint.
Line: 1
Column: 1
import collections
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, workspace
from caffe2.quantization.server.dnnlowp_test_utils import check_quantized_results_close
Reported by Pylint.
Line: 17
Column: 1
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
class DNNLowPMulOpTest(hu.HypothesisTestCase):
@given(
N=st.integers(32, 256),
is_empty=st.booleans(),
in_quantized=st.booleans(),
out_quantized=st.booleans(),
Reported by Pylint.
Line: 26
Column: 5
in_place=st.sampled_from([(False, False), (True, False), (False, True)]),
**hu.gcs_cpu_only
)
@settings(deadline=None)
def test_dnnlowp_elementwise_mul_int(
self, N, is_empty, in_quantized, out_quantized, in_place, gc, dc
):
if is_empty:
N = 0
Reported by Pylint.
Line: 26
Column: 5
in_place=st.sampled_from([(False, False), (True, False), (False, True)]),
**hu.gcs_cpu_only
)
@settings(deadline=None)
def test_dnnlowp_elementwise_mul_int(
self, N, is_empty, in_quantized, out_quantized, in_place, gc, dc
):
if is_empty:
N = 0
Reported by Pylint.
torch/distributed/pipeline/sync/pipe.py
31 issues
Line: 17
Column: 1
import torch.autograd
import torch.cuda
from . import microbatch
from .batchnorm import DeferredBatchNorm
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
Reported by Pylint.
Line: 18
Column: 1
import torch.cuda
from . import microbatch
from .batchnorm import DeferredBatchNorm
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
Reported by Pylint.
Line: 19
Column: 1
from . import microbatch
from .batchnorm import DeferredBatchNorm
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
__all__ = ["Pipe"]
Reported by Pylint.
Line: 20
Column: 1
from . import microbatch
from .batchnorm import DeferredBatchNorm
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
__all__ = ["Pipe"]
Reported by Pylint.
Line: 21
Column: 1
from .batchnorm import DeferredBatchNorm
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
__all__ = ["Pipe"]
Reported by Pylint.
Line: 22
Column: 1
from .pipeline import Pipeline
from .skip.layout import inspect_skip_layout
from .skip.skippable import verify_skippables
from .stream import AbstractStream, new_stream
__all__ = ["Pipe"]
Device = Union[torch.device, int, str]
Reported by Pylint.
Line: 27
Column: 16
__all__ = ["Pipe"]
Device = Union[torch.device, int, str]
Devices = Union[Iterable[Device], List[Device]]
Tensors = Sequence[Tensor]
TensorOrTensors = Union[Tensor, Tensors]
Reported by Pylint.
Line: 35
Column: 14
if TYPE_CHECKING:
# Typechecking: nn.Module is not a Generic
Module = nn.Module[TensorOrTensors] # type: ignore[type-arg]
NamedModules = OrderedDict[str, Module]
else:
Module = nn.Module
NamedModules = OrderedDict
Reported by Pylint.
Line: 36
Column: 20
if TYPE_CHECKING:
# Typechecking: nn.Module is not a Generic
Module = nn.Module[TensorOrTensors] # type: ignore[type-arg]
NamedModules = OrderedDict[str, Module]
else:
Module = nn.Module
NamedModules = OrderedDict
Reported by Pylint.
Line: 71
Column: 75
def _verify_splitting(
module: nn.Sequential, partitions: List[nn.Sequential], devices: List[torch.device]
) -> None:
num_parameters = len(list(module.parameters()))
num_child_parameters = sum(len(list(child.parameters())) for child in module.children())
if num_parameters == num_child_parameters:
return
Reported by Pylint.
torch/_linalg_utils.py
31 issues
Line: 14
Column: 28
def is_sparse(A):
"""Check if tensor A is a sparse tensor"""
if isinstance(A, torch.Tensor):
return A.layout == torch.sparse_coo
error_str = "expected Tensor"
if not torch.jit.is_scripting():
error_str += " but got {}".format(type(A))
raise TypeError(error_str)
Reported by Pylint.
Line: 27
Column: 33
Integer types map to float32.
"""
dtype = A.dtype
if dtype in (torch.float16, torch.float32, torch.float64):
return dtype
return torch.float32
def matmul(A: Optional[Tensor], B: Tensor) -> Tensor:
Reported by Pylint.
Line: 27
Column: 18
Integer types map to float32.
"""
dtype = A.dtype
if dtype in (torch.float16, torch.float32, torch.float64):
return dtype
return torch.float32
def matmul(A: Optional[Tensor], B: Tensor) -> Tensor:
Reported by Pylint.
Line: 27
Column: 48
Integer types map to float32.
"""
dtype = A.dtype
if dtype in (torch.float16, torch.float32, torch.float64):
return dtype
return torch.float32
def matmul(A: Optional[Tensor], B: Tensor) -> Tensor:
Reported by Pylint.
Line: 29
Column: 12
dtype = A.dtype
if dtype in (torch.float16, torch.float32, torch.float64):
return dtype
return torch.float32
def matmul(A: Optional[Tensor], B: Tensor) -> Tensor:
"""Multiply two matrices.
Reported by Pylint.
Line: 42
Column: 12
return B
if is_sparse(A):
return torch.sparse.mm(A, B)
return torch.matmul(A, B)
def conjugate(A):
"""Return conjugate of tensor A.
Reported by Pylint.
Line: 87
Column: 13
# torch.orgqr is not available in CUDA
Q = torch.linalg.qr(A).Q
else:
Q = torch.orgqr(*torch.geqrf(A))
return Q
def symeig(A: Tensor, largest: Optional[bool] = False) -> Tuple[Tensor, Tensor]:
"""Return eigenpairs of A with specified ordering.
Reported by Pylint.
Line: 87
Column: 26
# torch.orgqr is not available in CUDA
Q = torch.linalg.qr(A).Q
else:
Q = torch.orgqr(*torch.geqrf(A))
return Q
def symeig(A: Tensor, largest: Optional[bool] = False) -> Tuple[Tensor, Tensor]:
"""Return eigenpairs of A with specified ordering.
Reported by Pylint.
Line: 99
Column: 13
E, Z = torch.linalg.eigh(A, UPLO='U')
# assuming that E is ordered
if largest:
E = torch.flip(E, dims=(-1,))
Z = torch.flip(Z, dims=(-1,))
return E, Z
Reported by Pylint.
Line: 100
Column: 13
# assuming that E is ordered
if largest:
E = torch.flip(E, dims=(-1,))
Z = torch.flip(Z, dims=(-1,))
return E, Z
Reported by Pylint.