The following issues were found:
pipenv/patched/notpip/_vendor/requests/models.py
86 issues
Line: 25
Column: 1
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
Reported by Pylint.
Line: 26
Column: 1
from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
Reported by Pylint.
Line: 28
Column: 1
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
Reported by Pylint.
Line: 29
Column: 1
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
Reported by Pylint.
Line: 30
Column: 1
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
Reported by Pylint.
Line: 33
Column: 1
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
Reported by Pylint.
Line: 34
Column: 1
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
Callable, Mapping,
Reported by Pylint.
Line: 38
Column: 1
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
Reported by Pylint.
Line: 42
Column: 1
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
Reported by Pylint.
Line: 43
Column: 1
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
Reported by Pylint.
pipenv/vendor/cerberus/tests/test_normalization.py
85 issues
Line: 6
Column: 1
from copy import deepcopy
from tempfile import NamedTemporaryFile
from pytest import mark
from cerberus import Validator, errors
from cerberus.tests import (
assert_fail,
assert_has_error,
Reported by Pylint.
Line: 8
Column: 1
from pytest import mark
from cerberus import Validator, errors
from cerberus.tests import (
assert_fail,
assert_has_error,
assert_normalized,
assert_success,
Reported by Pylint.
Line: 9
Column: 1
from pytest import mark
from cerberus import Validator, errors
from cerberus.tests import (
assert_fail,
assert_has_error,
assert_normalized,
assert_success,
)
Reported by Pylint.
Line: 154
Column: 38
schema = {'foo': {'coerce': [hex, dont_do_me]}}
validator({'foo': '0'}, schema)
assert errors.COERCION_FAILED in validator._errors
def test_coerce_non_digit_in_sequence(validator):
# https://github.com/pyeve/cerberus/issues/211
schema = {'data': {'type': 'list', 'schema': {'type': 'integer', 'coerce': int}}}
Reported by Pylint.
Line: 182
Column: 38
assert_normalized(document, document, schema)
validator({'foo': 2}, schema)
assert errors.COERCION_FAILED in validator._errors
def test_normalized():
schema = {'amount': {'coerce': int}}
document = {'amount': '2'}
Reported by Pylint.
Line: 376
Column: 45
'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1},
}
validator({}, schema)
assert errors.SETTING_DEFAULT_FAILED in validator._errors
def test_issue_250():
# https://github.com/pyeve/cerberus/issues/250
schema = {
Reported by Pylint.
Line: 1
Column: 1
# -*- coding: utf-8 -*-
from copy import deepcopy
from tempfile import NamedTemporaryFile
from pytest import mark
from cerberus import Validator, errors
from cerberus.tests import (
Reported by Pylint.
Line: 17
Column: 1
)
def must_not_be_called(*args, **kwargs):
raise RuntimeError('This shall not be called.')
def test_coerce():
schema = {'amount': {'coerce': int}}
Reported by Pylint.
Line: 21
Column: 1
raise RuntimeError('This shall not be called.')
def test_coerce():
schema = {'amount': {'coerce': int}}
document = {'amount': '1'}
expected = {'amount': 1}
assert_normalized(document, expected, schema)
Reported by Pylint.
Line: 28
Column: 1
assert_normalized(document, expected, schema)
def test_coerce_in_dictschema():
schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}}
document = {'thing': {'amount': '2'}}
expected = {'thing': {'amount': 2}}
assert_normalized(document, expected, schema)
Reported by Pylint.
pipenv/vendor/pythonfinder/models/path.py
85 issues
Line: 12
Column: 1
import attr
import six
from cached_property import cached_property
from ..compat import Path, fs_str
from ..environment import (
ASDF_DATA_DIR,
ASDF_INSTALLED,
Reported by Pylint.
Line: 13
Column: 1
import attr
import six
from cached_property import cached_property
from ..compat import Path, fs_str
from ..environment import (
ASDF_DATA_DIR,
ASDF_INSTALLED,
MYPY_RUNNING,
Reported by Pylint.
Line: 15
Column: 1
from cached_property import cached_property
from ..compat import Path, fs_str
from ..environment import (
ASDF_DATA_DIR,
ASDF_INSTALLED,
MYPY_RUNNING,
PYENV_INSTALLED,
PYENV_ROOT,
Reported by Pylint.
Line: 24
Column: 1
SHIM_PATHS,
get_shim_paths,
)
from ..exceptions import InvalidPythonVersion
from ..utils import (
Iterable,
Sequence,
dedup,
ensure_path,
Reported by Pylint.
Line: 25
Column: 1
get_shim_paths,
)
from ..exceptions import InvalidPythonVersion
from ..utils import (
Iterable,
Sequence,
dedup,
ensure_path,
filter_pythons,
Reported by Pylint.
Line: 40
Column: 1
split_version_and_name,
unnest,
)
from .mixins import BaseFinder, BasePath
if MYPY_RUNNING:
from typing import (
Optional,
Dict,
Reported by Pylint.
Line: 57
Column: 5
Any,
TypeVar,
)
from .python import PythonFinder, PythonVersion
from .windows import WindowsFinder
FinderType = TypeVar("FinderType", BaseFinder, PythonFinder, WindowsFinder)
ChildType = Union[PythonFinder, "PathEntry"]
PathType = Union[PythonFinder, "PathEntry"]
Reported by Pylint.
Line: 58
Column: 5
TypeVar,
)
from .python import PythonFinder, PythonVersion
from .windows import WindowsFinder
FinderType = TypeVar("FinderType", BaseFinder, PythonFinder, WindowsFinder)
ChildType = Union[PythonFinder, "PathEntry"]
PathType = Union[PythonFinder, "PathEntry"]
Reported by Pylint.
Line: 191
Column: 25
continue
if entry not in self._version_dict[version] and entry.is_python:
self._version_dict[version].append(entry)
for p, entry in self.python_executables.items():
version = entry.as_python # type: PythonVersion
if not version:
continue
if not isinstance(version, tuple):
version = version.version_tuple
Reported by Pylint.
Line: 313
Column: 9
# type: () -> "SystemPath"
if "asdf" in self.finders and self.asdf_finder is not None:
return self
from .python import PythonFinder
os_path = os.environ["PATH"].split(os.pathsep)
asdf_finder = PythonFinder.create(
root=ASDF_DATA_DIR,
ignore_unsupported=True,
Reported by Pylint.
pipenv/vendor/distlib/database.py
84 issues
Line: 20
Column: 1
import sys
import zipimport
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
Reported by Pylint.
Line: 21
Column: 1
import zipimport
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 22
Column: 1
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 23
Column: 1
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 25
Column: 1
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
__all__ = ['Distribution', 'BaseInstalledDistribution',
'InstalledDistribution', 'EggInfoDistribution',
Reported by Pylint.
Line: 942
Column: 24
requires = parse_requires_path(req_path)
else:
# FIXME handle the case where zipfile is not available
zipf = zipimport.zipimporter(path)
fileobj = StringIO(
zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
metadata = Metadata(fileobj=fileobj, scheme='legacy')
try:
data = zipf.get_data('EGG-INFO/requires.txt')
Reported by Pylint.
Line: 267
Column: 17
try:
matcher = self._scheme.matcher('%s (%s)' % (name, version))
except ValueError:
raise DistlibException('invalid name or version: %r, %r' %
(name, version))
for dist in self.get_distributions():
# We hit a problem on Travis where enum34 was installed and doesn't
# have a provides attribute ...
Reported by Pylint.
Line: 421
Column: 3
try:
matcher = scheme.matcher(r.requirement)
except UnsupportedVersionError:
# XXX compat-mode if cannot read the version
logger.warning('could not read version %r - using name only',
req)
name = req.split()[0]
matcher = scheme.matcher(name)
Reported by Pylint.
Line: 817
Column: 38
:type path: str
:rtype: str
"""
# Check if it is an absolute path # XXX use relpath, add tests
if path.find(os.sep) >= 0:
# it's an absolute path?
distinfo_dirname, path = path.split(os.sep)[-2:]
if distinfo_dirname != self.path.split(os.sep)[-1]:
raise DistlibException(
Reported by Pylint.
Line: 842
Column: 29
:returns: iterator of paths
"""
base = os.path.dirname(self.path)
for path, checksum, size in self._get_records():
# XXX add separator or use real relpath algo
if not os.path.isabs(path):
path = os.path.join(base, path)
if path.startswith(self.path):
yield path
Reported by Pylint.
pipenv/patched/notpip/_vendor/distlib/database.py
84 issues
Line: 20
Column: 1
import sys
import zipimport
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
Reported by Pylint.
Line: 21
Column: 1
import zipimport
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 22
Column: 1
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 23
Column: 1
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
Reported by Pylint.
Line: 25
Column: 1
from .version import get_scheme, UnsupportedVersionError
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
__all__ = ['Distribution', 'BaseInstalledDistribution',
'InstalledDistribution', 'EggInfoDistribution',
Reported by Pylint.
Line: 942
Column: 24
requires = parse_requires_path(req_path)
else:
# FIXME handle the case where zipfile is not available
zipf = zipimport.zipimporter(path)
fileobj = StringIO(
zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
metadata = Metadata(fileobj=fileobj, scheme='legacy')
try:
data = zipf.get_data('EGG-INFO/requires.txt')
Reported by Pylint.
Line: 267
Column: 17
try:
matcher = self._scheme.matcher('%s (%s)' % (name, version))
except ValueError:
raise DistlibException('invalid name or version: %r, %r' %
(name, version))
for dist in self.get_distributions():
# We hit a problem on Travis where enum34 was installed and doesn't
# have a provides attribute ...
Reported by Pylint.
Line: 421
Column: 3
try:
matcher = scheme.matcher(r.requirement)
except UnsupportedVersionError:
# XXX compat-mode if cannot read the version
logger.warning('could not read version %r - using name only',
req)
name = req.split()[0]
matcher = scheme.matcher(name)
Reported by Pylint.
Line: 817
Column: 38
:type path: str
:rtype: str
"""
# Check if it is an absolute path # XXX use relpath, add tests
if path.find(os.sep) >= 0:
# it's an absolute path?
distinfo_dirname, path = path.split(os.sep)[-2:]
if distinfo_dirname != self.path.split(os.sep)[-1]:
raise DistlibException(
Reported by Pylint.
Line: 842
Column: 29
:returns: iterator of paths
"""
base = os.path.dirname(self.path)
for path, checksum, size in self._get_records():
# XXX add separator or use real relpath algo
if not os.path.isabs(path):
path = os.path.join(base, path)
if path.startswith(self.path):
yield path
Reported by Pylint.
pipenv/patched/notpip/_vendor/webencodings/tests.py
84 issues
Line: 16
Column: 1
from __future__ import unicode_literals
from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
IncrementalDecoder, IncrementalEncoder, UTF8)
def assert_raises(exception, function, *args, **kwargs):
try:
Reported by Pylint.
Line: 108
Column: 31
def test_iter_decode():
def iter_decode_to_string(input, fallback_encoding):
output, _encoding = iter_decode(input, fallback_encoding)
return ''.join(output)
assert iter_decode_to_string([], 'latin1') == ''
assert iter_decode_to_string([b''], 'latin1') == ''
assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
Reported by Pylint.
Line: 20
Column: 1
IncrementalDecoder, IncrementalEncoder, UTF8)
def assert_raises(exception, function, *args, **kwargs):
try:
function(*args, **kwargs)
except exception:
return
else: # pragma: no cover
Reported by Pylint.
Line: 29
Column: 1
raise AssertionError('Did not raise %s.' % exception)
def test_labels():
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
Reported by Pylint.
Line: 30
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_labels():
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
Reported by Bandit.
Line: 31
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_labels():
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
Reported by Bandit.
Line: 32
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_labels():
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
assert lookup('u8') is None # Python label.
Reported by Bandit.
Line: 33
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert lookup('utf-8').name == 'utf-8'
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
assert lookup('u8') is None # Python label.
assert lookup('utf-8 ') is None # Non-ASCII white space.
Reported by Bandit.
Line: 34
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert lookup('Utf-8').name == 'utf-8'
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
assert lookup('u8') is None # Python label.
assert lookup('utf-8 ') is None # Non-ASCII white space.
Reported by Bandit.
Line: 35
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert lookup('UTF-8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8').name == 'utf-8'
assert lookup('utf8 ').name == 'utf-8'
assert lookup(' \r\nutf8\t').name == 'utf-8'
assert lookup('u8') is None # Python label.
assert lookup('utf-8 ') is None # Non-ASCII white space.
assert lookup('US-ASCII').name == 'windows-1252'
Reported by Bandit.
pipenv/patched/yaml3/loader.py
83 issues
Line: 4
Column: 1
__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
from .reader import *
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
Reported by Pylint.
Line: 5
Column: 1
__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
from .reader import *
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
Reported by Pylint.
Line: 6
Column: 1
from .reader import *
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
Reported by Pylint.
Line: 7
Column: 1
from .reader import *
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
Reported by Pylint.
Line: 8
Column: 1
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reported by Pylint.
Line: 9
Column: 1
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Reported by Pylint.
Line: 11
Column: 70
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 11
Column: 18
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 11
Column: 43
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
Line: 11
Column: 35
from .constructor import *
from .resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Reported by Pylint.
pipenv/patched/yaml3/__init__.py
82 issues
Line: 2
Column: 1
from .error import *
from .tokens import *
from .events import *
from .nodes import *
from .loader import *
from .dumper import *
Reported by Pylint.
Line: 4
Column: 1
from .error import *
from .tokens import *
from .events import *
from .nodes import *
from .loader import *
from .dumper import *
Reported by Pylint.
Line: 5
Column: 1
from .error import *
from .tokens import *
from .events import *
from .nodes import *
from .loader import *
from .dumper import *
Reported by Pylint.
Line: 6
Column: 1
from .tokens import *
from .events import *
from .nodes import *
from .loader import *
from .dumper import *
__version__ = '5.4'
Reported by Pylint.
Line: 8
Column: 1
from .events import *
from .nodes import *
from .loader import *
from .dumper import *
__version__ = '5.4'
try:
from .cyaml import *
Reported by Pylint.
Line: 9
Column: 1
from .nodes import *
from .loader import *
from .dumper import *
__version__ = '5.4'
try:
from .cyaml import *
__with_libyaml__ = True
Reported by Pylint.
Line: 58
Column: 25
warnings.warn(message, YAMLLoadWarning, stacklevel=3)
#------------------------------------------------------------------------------
def scan(stream, Loader=Loader):
"""
Scan a YAML stream and produce scanning tokens.
"""
loader = Loader(stream)
try:
Reported by Pylint.
Line: 69
Column: 26
finally:
loader.dispose()
def parse(stream, Loader=Loader):
"""
Parse a YAML stream and produce parsing events.
"""
loader = Loader(stream)
try:
Reported by Pylint.
Line: 80
Column: 28
finally:
loader.dispose()
def compose(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
loader = Loader(stream)
Reported by Pylint.
Line: 91
Column: 32
finally:
loader.dispose()
def compose_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
loader = Loader(stream)
Reported by Pylint.
pipenv/vendor/requirementslib/models/markers.py
82 issues
Line: 7
Column: 1
import re
import attr
import distlib.markers
import packaging.version
import six
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
Reported by Pylint.
Line: 8
Column: 1
import attr
import distlib.markers
import packaging.version
import six
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
Reported by Pylint.
Line: 10
Column: 1
import distlib.markers
import packaging.version
import six
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
Reported by Pylint.
Line: 11
Column: 1
import packaging.version
import six
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
Reported by Pylint.
Line: 12
Column: 1
import six
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from .utils import filter_none, validate_markers
Reported by Pylint.
Line: 13
Column: 1
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import Specifier, SpecifierSet
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from .utils import filter_none, validate_markers
Reported by Pylint.
Line: 15
Column: 1
from vistir.compat import Mapping, Set, lru_cache
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from .utils import filter_none, validate_markers
from six.moves import reduce # isort:skip
Reported by Pylint.
Line: 16
Column: 1
from vistir.misc import dedup
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from .utils import filter_none, validate_markers
from six.moves import reduce # isort:skip
Reported by Pylint.
Line: 17
Column: 1
from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from .utils import filter_none, validate_markers
from six.moves import reduce # isort:skip
if MYPY_RUNNING:
Reported by Pylint.
Line: 86
Column: 28
@property
def pipfile_part(self):
return {"markers": self.as_line}
@classmethod
def make_marker(cls, marker_string):
try:
marker = Marker(marker_string)
Reported by Pylint.
pipenv/vendor/pexpect/expect.py
82 issues
Line: 3
Column: 1
import time
from .exceptions import EOF, TIMEOUT
class Expecter(object):
def __init__(self, spawn, searcher, searchwindowsize=-1):
self.spawn = spawn
self.searcher = searcher
# A value of -1 means to use the figure from spawn, which should
Reported by Pylint.
Line: 272
Column: 26
offset = -(freshlen + len(s))
else:
# better obey searchwindowsize
offset = -searchwindowsize
n = buffer.find(s, offset)
if n >= 0 and (first_match is None or n < first_match):
first_match = n
best_index, best_match = index, s
if first_match is None:
Reported by Pylint.
Line: 25
Column: 13
freshlen = len(window)
index = searcher.search(window, freshlen, self.searchwindowsize)
if index >= 0:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[searcher.end:])
spawn.before = spawn._before.getvalue()[
0:-(len(window) - searcher.start)]
spawn._before = spawn.buffer_type()
spawn._before.write(window[searcher.end:])
Reported by Pylint.
Line: 26
Column: 13
index = searcher.search(window, freshlen, self.searchwindowsize)
if index >= 0:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[searcher.end:])
spawn.before = spawn._before.getvalue()[
0:-(len(window) - searcher.start)]
spawn._before = spawn.buffer_type()
spawn._before.write(window[searcher.end:])
spawn.after = window[searcher.start:searcher.end]
Reported by Pylint.
Line: 27
Column: 28
if index >= 0:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[searcher.end:])
spawn.before = spawn._before.getvalue()[
0:-(len(window) - searcher.start)]
spawn._before = spawn.buffer_type()
spawn._before.write(window[searcher.end:])
spawn.after = window[searcher.start:searcher.end]
spawn.match = searcher.match
Reported by Pylint.
Line: 29
Column: 13
spawn._buffer.write(window[searcher.end:])
spawn.before = spawn._before.getvalue()[
0:-(len(window) - searcher.start)]
spawn._before = spawn.buffer_type()
spawn._before.write(window[searcher.end:])
spawn.after = window[searcher.start:searcher.end]
spawn.match = searcher.match
spawn.match_index = index
# Found a match
Reported by Pylint.
Line: 30
Column: 13
spawn.before = spawn._before.getvalue()[
0:-(len(window) - searcher.start)]
spawn._before = spawn.buffer_type()
spawn._before.write(window[searcher.end:])
spawn.after = window[searcher.start:searcher.end]
spawn.match = searcher.match
spawn.match_index = index
# Found a match
return index
Reported by Pylint.
Line: 38
Column: 16
return index
elif self.searchwindowsize or self.lookback:
maintain = self.searchwindowsize or self.lookback
if spawn._buffer.tell() > maintain:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[-maintain:])
def existing_data(self):
# First call from a new call to expect_loop or expect_async.
Reported by Pylint.
Line: 39
Column: 17
elif self.searchwindowsize or self.lookback:
maintain = self.searchwindowsize or self.lookback
if spawn._buffer.tell() > maintain:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[-maintain:])
def existing_data(self):
# First call from a new call to expect_loop or expect_async.
# self.searchwindowsize may have changed.
Reported by Pylint.
Line: 40
Column: 17
maintain = self.searchwindowsize or self.lookback
if spawn._buffer.tell() > maintain:
spawn._buffer = spawn.buffer_type()
spawn._buffer.write(window[-maintain:])
def existing_data(self):
# First call from a new call to expect_loop or expect_async.
# self.searchwindowsize may have changed.
# Treat all data as fresh.
Reported by Pylint.