The following issues were found:
collectors/python.d.plugin/python_modules/pyyaml2/representer.py
91 issues
Line: 6
Column: 1
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
'RepresenterError']
from error import *
from nodes import *
import datetime
import sys, copy_reg, types
Reported by Pylint.
Line: 7
Column: 1
'RepresenterError']
from error import *
from nodes import *
import datetime
import sys, copy_reg, types
Reported by Pylint.
Line: 11
Column: 1
import datetime
import sys, copy_reg, types
class RepresenterError(YAMLError):
pass
class BaseRepresenter(object):
Reported by Pylint.
Line: 13
Column: 24
import sys, copy_reg, types
class RepresenterError(YAMLError):
pass
class BaseRepresenter(object):
yaml_representers = {}
Reported by Pylint.
Line: 30
Column: 9
def represent(self, data):
node = self.represent_data(data)
self.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def get_classobj_bases(self, cls):
Reported by Pylint.
Line: 55
Column: 26
#self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
if type(data) is types.InstanceType:
data_types = self.get_classobj_bases(data.__class__)+list(data_types)
if data_types[0] in self.yaml_representers:
node = self.yaml_representers[data_types[0]](self, data)
else:
for data_type in data_types:
Reported by Pylint.
Line: 70
Column: 28
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, unicode(data))
#if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
def add_representer(cls, data_type, representer):
Reported by Pylint.
Line: 70
Column: 45
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, unicode(data))
#if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
def add_representer(cls, data_type, representer):
Reported by Pylint.
Line: 90
Column: 16
def represent_scalar(self, tag, value, style=None):
if style is None:
style = self.default_style
node = ScalarNode(tag, value, style=style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
return node
def represent_sequence(self, tag, sequence, flow_style=None):
Reported by Pylint.
Line: 97
Column: 16
def represent_sequence(self, tag, sequence, flow_style=None):
value = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
for item in sequence:
node_item = self.represent_data(item)
Reported by Pylint.
collectors/python.d.plugin/anomalies/anomalies.chart.py
87 issues
Line: 14
Column: 1
import requests
import numpy as np
import pandas as pd
from netdata_pandas.data import get_data, get_allmetrics_async
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
Reported by Pylint.
Line: 15
Column: 1
import requests
import numpy as np
import pandas as pd
from netdata_pandas.data import get_data, get_allmetrics_async
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
Reported by Pylint.
Line: 16
Column: 1
import numpy as np
import pandas as pd
from netdata_pandas.data import get_data, get_allmetrics_async
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
Reported by Pylint.
Line: 17
Column: 1
import pandas as pd
from netdata_pandas.data import get_data, get_allmetrics_async
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
Reported by Pylint.
Line: 18
Column: 1
from netdata_pandas.data import get_data, get_allmetrics_async
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
Reported by Pylint.
Line: 19
Column: 1
from pyod.models.hbos import HBOS
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
Reported by Pylint.
Line: 20
Column: 1
from pyod.models.pca import PCA
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
from bases.FrameworkServices.SimpleService import SimpleService
Reported by Pylint.
Line: 21
Column: 1
from pyod.models.loda import LODA
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
from bases.FrameworkServices.SimpleService import SimpleService
Reported by Pylint.
Line: 22
Column: 1
from pyod.models.iforest import IForest
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
from bases.FrameworkServices.SimpleService import SimpleService
# ignore some sklearn/numpy warnings that are ok
Reported by Pylint.
Line: 23
Column: 1
from pyod.models.cblof import CBLOF
from pyod.models.feature_bagging import FeatureBagging
from pyod.models.copod import COPOD
from sklearn.preprocessing import MinMaxScaler
from bases.FrameworkServices.SimpleService import SimpleService
# ignore some sklearn/numpy warnings that are ok
warnings.filterwarnings('ignore', r'All-NaN slice encountered')
Reported by Pylint.
collectors/python.d.plugin/smartd_log/smartd_log.chart.py
82 issues
Line: 11
Column: 1
from copy import deepcopy
from time import time
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import read_last_line
INCREMENTAL = 'incremental'
ABSOLUTE = 'absolute'
Reported by Pylint.
Line: 12
Column: 1
from time import time
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import read_last_line
INCREMENTAL = 'incremental'
ABSOLUTE = 'absolute'
ATA = 'ata'
Reported by Pylint.
Line: 63
Column: 7
ATTR_TEMPERATURE = 'temperature'
RE_ATA = re.compile(
'(\d+);' # attribute
'(\d+);' # normalized value
'(\d+)', # raw value
re.X
)
Reported by Pylint.
Line: 64
Column: 7
RE_ATA = re.compile(
'(\d+);' # attribute
'(\d+);' # normalized value
'(\d+)', # raw value
re.X
)
RE_SCSI = re.compile(
Reported by Pylint.
Line: 65
Column: 7
RE_ATA = re.compile(
'(\d+);' # attribute
'(\d+);' # normalized value
'(\d+)', # raw value
re.X
)
RE_SCSI = re.compile(
'([a-z-]+);' # attribute
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: smart netdata python.d module
# Author: ilyam8, vorph1
# SPDX-License-Identifier: GPL-3.0-or-later
import os
import re
from copy import deepcopy
from time import time
Reported by Pylint.
Line: 121
Column: 1
CHARTS = {
'read_error_rate': {
'options': [None, 'Read Error Rate', 'value', 'errors', 'smartd_log.read_error_rate', 'line'],
'lines': [],
'attrs': [ATTR1],
'algo': ABSOLUTE,
},
'seek_error_rate': {
Reported by Pylint.
Line: 127
Column: 1
'algo': ABSOLUTE,
},
'seek_error_rate': {
'options': [None, 'Seek Error Rate', 'value', 'errors', 'smartd_log.seek_error_rate', 'line'],
'lines': [],
'attrs': [ATTR7],
'algo': ABSOLUTE,
},
'soft_read_error_rate': {
Reported by Pylint.
Line: 133
Column: 1
'algo': ABSOLUTE,
},
'soft_read_error_rate': {
'options': [None, 'Soft Read Error Rate', 'errors', 'errors', 'smartd_log.soft_read_error_rate', 'line'],
'lines': [],
'attrs': [ATTR13],
'algo': INCREMENTAL,
},
'write_error_rate': {
Reported by Pylint.
Line: 139
Column: 1
'algo': INCREMENTAL,
},
'write_error_rate': {
'options': [None, 'Write Error Rate', 'value', 'errors', 'smartd_log.write_error_rate', 'line'],
'lines': [],
'attrs': [ATTR206],
'algo': ABSOLUTE,
},
'read_total_err_corrected': {
Reported by Pylint.
collectors/python.d.plugin/python_modules/urllib3/util/selectors.py
74 issues
Line: 15
Column: 1
import socket
import sys
import time
from collections import namedtuple, Mapping
try:
monotonic = time.monotonic
except (AttributeError, ImportError): # Python 3.3<
monotonic = time.time
Reported by Pylint.
Line: 110
Column: 71
# Also test for the Windows equivalent of EINTR.
is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
errcode == errno.WSAEINTR))
if is_interrupt:
if expires is not None:
current_time = monotonic()
if current_time > expires:
Reported by Pylint.
Line: 462
Column: 28
""" Kqueue / Kevent-based selector """
def __init__(self):
super(KqueueSelector, self).__init__()
self._kqueue = select.kqueue()
def fileno(self):
return self._kqueue.fileno()
def register(self, fileobj, events, data=None):
Reported by Pylint.
Line: 470
Column: 26
def register(self, fileobj, events, data=None):
key = super(KqueueSelector, self).register(fileobj, events, data)
if events & EVENT_READ:
kevent = select.kevent(key.fd,
select.KQ_FILTER_READ,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
Reported by Pylint.
Line: 471
Column: 40
key = super(KqueueSelector, self).register(fileobj, events, data)
if events & EVENT_READ:
kevent = select.kevent(key.fd,
select.KQ_FILTER_READ,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
if events & EVENT_WRITE:
Reported by Pylint.
Line: 472
Column: 40
if events & EVENT_READ:
kevent = select.kevent(key.fd,
select.KQ_FILTER_READ,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
if events & EVENT_WRITE:
kevent = select.kevent(key.fd,
Reported by Pylint.
Line: 477
Column: 26
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
if events & EVENT_WRITE:
kevent = select.kevent(key.fd,
select.KQ_FILTER_WRITE,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
Reported by Pylint.
Line: 478
Column: 40
if events & EVENT_WRITE:
kevent = select.kevent(key.fd,
select.KQ_FILTER_WRITE,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
return key
Reported by Pylint.
Line: 479
Column: 40
if events & EVENT_WRITE:
kevent = select.kevent(key.fd,
select.KQ_FILTER_WRITE,
select.KQ_EV_ADD)
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
return key
Reported by Pylint.
Line: 488
Column: 26
def unregister(self, fileobj):
key = super(KqueueSelector, self).unregister(fileobj)
if key.events & EVENT_READ:
kevent = select.kevent(key.fd,
select.KQ_FILTER_READ,
select.KQ_EV_DELETE)
try:
_syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
except SelectorError:
Reported by Pylint.
collectors/python.d.plugin/nvidia_smi/nvidia_smi.chart.py
74 issues
Line: 14
Column: 1
import xml.etree.ElementTree as et
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import find_binary
disabled_by_default = True
NVIDIA_SMI = 'nvidia-smi'
Reported by Pylint.
Line: 15
Column: 1
import xml.etree.ElementTree as et
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import find_binary
disabled_by_default = True
NVIDIA_SMI = 'nvidia-smi'
Reported by Pylint.
Line: 532
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b313-b320-xml-bad-elementtree
def parse_xml(self, data):
try:
return et.fromstring(data)
except et.ParseError as error:
self.error('xml parse failed: "{0}", error: {1}'.format(data, error))
return None
Reported by Bandit.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: nvidia-smi netdata python.d module
# Original Author: Steven Noonan (tycho)
# Author: Ilya Mashchenko (ilyam8)
# User Memory Stat Author: Guido Scatena (scatenag)
import subprocess
import threading
import os
Reported by Pylint.
Line: 7
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess
# Author: Ilya Mashchenko (ilyam8)
# User Memory Stat Author: Guido Scatena (scatenag)
import subprocess
import threading
import os
import pwd
import xml.etree.ElementTree as et
Reported by Bandit.
Line: 12
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b405-import-xml-etree
import os
import pwd
import xml.etree.ElementTree as et
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import find_binary
disabled_by_default = True
Reported by Bandit.
Line: 17
Column: 1
from bases.FrameworkServices.SimpleService import SimpleService
from bases.collection import find_binary
disabled_by_default = True
NVIDIA_SMI = 'nvidia-smi'
EMPTY_ROW = ''
EMPTY_ROW_LIMIT = 500
Reported by Pylint.
Line: 54
Column: 1
]
def gpu_charts(gpu):
fam = gpu.full_name()
charts = {
PCI_BANDWIDTH: {
'options': [None, 'PCI Express Bandwidth Utilization', 'KiB/s', fam, 'nvidia_smi.pci_bandwidth', 'area'],
Reported by Pylint.
Line: 59
Column: 1
charts = {
PCI_BANDWIDTH: {
'options': [None, 'PCI Express Bandwidth Utilization', 'KiB/s', fam, 'nvidia_smi.pci_bandwidth', 'area'],
'lines': [
['rx_util', 'rx', 'absolute', 1, 1],
['tx_util', 'tx', 'absolute', 1, -1],
]
},
Reported by Pylint.
Line: 72
Column: 1
]
},
GPU_UTIL: {
'options': [None, 'GPU Utilization', 'percentage', fam, 'nvidia_smi.gpu_utilization', 'line'],
'lines': [
['gpu_util', 'utilization'],
]
},
MEM_UTIL: {
Reported by Pylint.
collectors/python.d.plugin/python_modules/pyyaml2/__init__.py
73 issues
Line: 3
Column: 1
# SPDX-License-Identifier: MIT
from error import *
from tokens import *
from events import *
from nodes import *
from loader import *
Reported by Pylint.
Line: 5
Column: 1
from error import *
from tokens import *
from events import *
from nodes import *
from loader import *
from dumper import *
Reported by Pylint.
Line: 6
Column: 1
from error import *
from tokens import *
from events import *
from nodes import *
from loader import *
from dumper import *
Reported by Pylint.
Line: 7
Column: 1
from tokens import *
from events import *
from nodes import *
from loader import *
from dumper import *
__version__ = '3.11'
Reported by Pylint.
Line: 9
Column: 1
from events import *
from nodes import *
from loader import *
from dumper import *
__version__ = '3.11'
try:
Reported by Pylint.
Line: 10
Column: 1
from nodes import *
from loader import *
from dumper import *
__version__ = '3.11'
try:
from cyaml import *
Reported by Pylint.
Line: 20
Column: 25
except ImportError:
__with_libyaml__ = False
def scan(stream, Loader=Loader):
"""
Scan a YAML stream and produce scanning tokens.
"""
loader = Loader(stream)
try:
Reported by Pylint.
Line: 31
Column: 26
finally:
loader.dispose()
def parse(stream, Loader=Loader):
"""
Parse a YAML stream and produce parsing events.
"""
loader = Loader(stream)
try:
Reported by Pylint.
Line: 42
Column: 28
finally:
loader.dispose()
def compose(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
loader = Loader(stream)
Reported by Pylint.
Line: 53
Column: 32
finally:
loader.dispose()
def compose_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
loader = Loader(stream)
Reported by Pylint.
collectors/python.d.plugin/postgres/postgres.chart.py
71 issues
Line: 18
Column: 1
except ImportError:
PSYCOPG2 = False
from bases.FrameworkServices.SimpleService import SimpleService
DEFAULT_PORT = 5432
DEFAULT_USER = 'postgres'
DEFAULT_CONNECT_TIMEOUT = 2 # seconds
DEFAULT_STATEMENT_TIMEOUT = 5000 # ms
Reported by Pylint.
Line: 235
Column: 46
DEFAULT: """
SELECT
CAST(COUNT(*) AS INT) AS file_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.ready$$r$ as INT)),0) AS INT) AS ready_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.done$$r$ AS INT)),0) AS INT) AS done_count
FROM
pg_catalog.pg_ls_dir('pg_wal/archive_status') AS archive_files (archive_file);
""",
V96: """
Reported by Pylint.
Line: 236
Column: 46
SELECT
CAST(COUNT(*) AS INT) AS file_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.ready$$r$ as INT)),0) AS INT) AS ready_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.done$$r$ AS INT)),0) AS INT) AS done_count
FROM
pg_catalog.pg_ls_dir('pg_wal/archive_status') AS archive_files (archive_file);
""",
V96: """
SELECT
Reported by Pylint.
Line: 243
Column: 46
V96: """
SELECT
CAST(COUNT(*) AS INT) AS file_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.ready$$r$ as INT)),0) AS INT) AS ready_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.done$$r$ AS INT)),0) AS INT) AS done_count
FROM
pg_catalog.pg_ls_dir('pg_xlog/archive_status') AS archive_files (archive_file);
""",
Reported by Pylint.
Line: 244
Column: 46
SELECT
CAST(COUNT(*) AS INT) AS file_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.ready$$r$ as INT)),0) AS INT) AS ready_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.done$$r$ AS INT)),0) AS INT) AS done_count
FROM
pg_catalog.pg_ls_dir('pg_xlog/archive_status') AS archive_files (archive_file);
""",
}
Reported by Pylint.
Line: 458
Column: 34
WHERE
has_database_privilege(
(SELECT current_user), datname, 'connect')
AND NOT datname ~* '^template\d';
""",
}
QUERY_STANDBY = {
DEFAULT: """
Reported by Pylint.
Line: 1150
Column: 16
try:
self.check_queries()
except Exception as error:
self.error(error)
return False
self.populate_queries()
self.create_dynamic_charts()
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: example netdata python.d module
# Authors: facetoe, dangtranhoang
# SPDX-License-Identifier: GPL-3.0-or-later
from copy import deepcopy
try:
import psycopg2
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: example netdata python.d module
# Authors: facetoe, dangtranhoang
# SPDX-License-Identifier: GPL-3.0-or-later
from copy import deepcopy
try:
import psycopg2
Reported by Pylint.
Line: 30
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b105_hardcoded_password_string.html
CONN_PARAM_PORT = 'port'
CONN_PARAM_DATABASE = 'database'
CONN_PARAM_USER = 'user'
CONN_PARAM_PASSWORD = 'password'
CONN_PARAM_CONN_TIMEOUT = 'connect_timeout'
CONN_PARAM_STATEMENT_TIMEOUT = 'statement_timeout'
CONN_PARAM_SSL_MODE = 'sslmode'
CONN_PARAM_SSL_ROOT_CERT = 'sslrootcert'
CONN_PARAM_SSL_CRL = 'sslcrl'
Reported by Bandit.
collectors/python.d.plugin/python_modules/urllib3/contrib/pyopenssl.py
70 issues
Line: 59
Column: 5
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
import logging
import ssl
try:
Reported by Pylint.
Line: 67
Column: 5
try:
import six
except ImportError:
from ..packages import six
import sys
from .. import util
Reported by Pylint.
Line: 71
Column: 1
import sys
from .. import util
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
# SNI always works.
HAS_SNI = True
Reported by Pylint.
Line: 205
Column: 38
except x509.ExtensionNotFound:
# No such extension, return the empty list.
return []
except (x509.DuplicateExtension, x509.UnsupportedExtension,
x509.UnsupportedGeneralNameType, UnicodeError) as e:
# A problem has been found with the quality of the certificate. Assume
# no SAN field is present.
log.warning(
"A problem was encountered with the certificate that prevented "
Reported by Pylint.
Line: 151
Column: 5
# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
# attribute is only present on those versions.
from OpenSSL.crypto import X509
x509 = X509()
if getattr(x509, "_x509", None) is None:
raise ImportError("'pyOpenSSL' module missing required functionality. "
"Try upgrading to v0.14 or newer.")
Reported by Pylint.
Line: 194
Column: 42
# Pass the cert to cryptography, which has much better APIs for this.
# This is technically using private APIs, but should work across all
# relevant versions until PyOpenSSL gets something proper for this.
cert = _Certificate(openssl_backend, peer_cert._x509)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
try:
ext = cert.extensions.get_extension_for_class(
Reported by Pylint.
Line: 266
Column: 17
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
return b''
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError as e:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b''
else:
raise
Reported by Pylint.
Line: 275
Column: 17
except OpenSSL.SSL.WantReadError:
rd = util.wait_for_read(self.socket, self.socket.gettimeout())
if not rd:
raise timeout('The read operation timed out')
else:
return self.recv(*args, **kwargs)
else:
return data
Reported by Pylint.
Line: 288
Column: 17
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
return 0
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError as e:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return 0
else:
raise
Reported by Pylint.
Line: 297
Column: 17
except OpenSSL.SSL.WantReadError:
rd = util.wait_for_read(self.socket, self.socket.gettimeout())
if not rd:
raise timeout('The read operation timed out')
else:
return self.recv_into(*args, **kwargs)
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
Reported by Pylint.
collectors/python.d.plugin/web_log/web_log.chart.py
68 issues
Line: 23
Column: 1
except ImportError:
from sys import maxsize as maxint
from bases.collection import read_last_line
from bases.FrameworkServices.LogService import LogService
ORDER_APACHE_CACHE = [
'apache_cache',
]
Reported by Pylint.
Line: 24
Column: 1
from sys import maxsize as maxint
from bases.collection import read_last_line
from bases.FrameworkServices.LogService import LogService
ORDER_APACHE_CACHE = [
'apache_cache',
]
Reported by Pylint.
Line: 15
Column: 5
try:
from itertools import filterfalse
except ImportError:
from itertools import ifilter as filter
from itertools import ifilterfalse as filterfalse
try:
from sys import maxint
except ImportError:
Reported by Pylint.
Line: 386
Column: 13
self.job = log_types[log_type](self)
if self.job.check():
self.order = self.job.order
self.definitions = self.job.definitions
return True
return False
def _get_data(self):
Reported by Pylint.
Line: 387
Column: 13
self.job = log_types[log_type](self)
if self.job.check():
self.order = self.job.order
self.definitions = self.job.definitions
return True
return False
def _get_data(self):
return self.job.get_data(self._get_raw_data())
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: web log netdata python.d module
# Author: ilyam8
# SPDX-License-Identifier: GPL-3.0-or-later
import bisect
import os
import re
from collections import namedtuple, defaultdict
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
# Description: web log netdata python.d module
# Author: ilyam8
# SPDX-License-Identifier: GPL-3.0-or-later
import bisect
import os
import re
from collections import namedtuple, defaultdict
Reported by Pylint.
Line: 70
Column: 1
CHARTS_WEB = {
'response_codes': {
'options': [None, 'Response Codes', 'requests/s', 'responses', 'web_log.response_codes', 'stacked'],
'lines': [
['2xx', None, 'incremental'],
['5xx', None, 'incremental'],
['3xx', None, 'incremental'],
['4xx', None, 'incremental'],
Reported by Pylint.
Line: 89
Column: 1
]
},
'response_time': {
'options': [None, 'Processing Time', 'milliseconds', 'timings', 'web_log.response_time', 'area'],
'lines': [
['resp_time_min', 'min', 'incremental', 1, 1000],
['resp_time_max', 'max', 'incremental', 1, 1000],
['resp_time_avg', 'avg', 'incremental', 1, 1000]
]
Reported by Pylint.
Line: 97
Column: 1
]
},
'response_time_hist': {
'options': [None, 'Processing Time Histogram', 'requests/s', 'timings', 'web_log.response_time_hist', 'line'],
'lines': []
},
'response_time_upstream': {
'options': [None, 'Processing Time Upstream', 'milliseconds', 'timings',
'web_log.response_time_upstream', 'area'],
Reported by Pylint.
collectors/python.d.plugin/python_modules/pyyaml2/loader.py
67 issues
Line: 5
Column: 1
__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
Reported by Pylint.
Line: 6
Column: 1
__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
Reported by Pylint.
Line: 8
Column: 1
from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
Reported by Pylint.
Line: 9
Column: 1
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reported by Pylint.
Line: 10
Column: 1
from parser import *
from composer import *
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Reported by Pylint.
Line: 12
Column: 35
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 12
Column: 18
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 12
Column: 26
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 12
Column: 43
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 12
Column: 70
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.