The following issues were found:
pipenv/patched/notpip/_vendor/pkg_resources/__init__.py
277 issues
Line: 58
Column: 1
    FileExistsError = OSError
from pipenv.patched.notpip._vendor import six
from pipenv.patched.notpip._vendor.six.moves import urllib, map, filter
# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
Reported by Pylint.
Line: 357
Column: 49
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
Reported by Pylint.
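Note: for context on the flagged function (this is not a fix for the Pylint message, which the report does not name), a short usage sketch of the public pkg_resources API, assuming the vendored copy behaves like upstream setuptools:

import pkg_resources

# By module name: imports the module if needed and returns an IResourceProvider for it.
provider = pkg_resources.get_provider("pkg_resources")

# By requirement: resolves the installed distribution first and raises
# DistributionNotFound if it is not installed.
req = pkg_resources.Requirement.parse("setuptools")
dist_provider = pkg_resources.get_provider(req)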
Line: 469
Column: 5
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
# backward compatibility
run_main = run_script
Reported by Pylint.
Line: 503
Column: 5
class IMetadataProvider:
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""
    def get_metadata(name):
        """The named metadata resource as a string"""
Reported by Pylint.
Line: 506
Column: 5
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""
    def get_metadata(name):
        """The named metadata resource as a string"""
    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines
Reported by Pylint.
Line: 509
Column: 5
    def get_metadata(name):
        """The named metadata resource as a string"""
    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines
        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""
Reported by Pylint.
Line: 515
Column: 5
        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""
    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""
    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""
Reported by Pylint.
Line: 518
Column: 5
    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""
    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""
    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
Reported by Pylint.
Line: 521
Column: 5
    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""
    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
Reported by Pylint.
Line: 528
Column: 5
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`
        `manager` must be an ``IResourceManager``"""
    def get_resource_stream(manager, resource_name):
Reported by Pylint.
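Note: the findings at lines 503-528 sit in the informal interface classes whose methods are deliberately declared without a self parameter; Pylint normally reports that pattern (e.g. as no-self-argument), which is an assumption here since the report omits the rule name. Written from scratch, the same contract could be expressed with abc without tripping that check; a minimal sketch with hypothetical names, not pkg_resources code:

from abc import ABC, abstractmethod

class MetadataProvider(ABC):
    @abstractmethod
    def has_metadata(self, name):
        """Does the package's distribution contain the named metadata?"""

    @abstractmethod
    def get_metadata(self, name):
        """Return the named metadata resource as a string."""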
pipenv/patched/notpip/_vendor/distlib/util.py
277 issues
Line: 33
Column: 1
    import dummy_threading as threading
import time
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
                     Container, configparser, URLError, ZipFile, fsdecode,
                     unquote, urlparse)
Reported by Pylint.
Line: 34
Column: 1
import time
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
                     Container, configparser, URLError, ZipFile, fsdecode,
                     unquote, urlparse)
Reported by Pylint.
Line: 558
Column: 57
        self.write_binary_file(path, data.encode(encoding))
    def set_mode(self, bits, mask, files):
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
Reported by Pylint.
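Note: set_mode chmods the listed files to make them executable. A small standalone sketch of the same operation with the stat constants (an assumption about intent, not distlib's implementation):

import os
import stat

def make_executable(path):
    # Add owner/group/world execute bits while preserving the current mode.
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)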
Line: 1410
Column: 5
            yield fn
if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
                         CertificateError)
    #
    # HTTPSConnection which verifies certificates/matches domains
Reported by Pylint.
Line: 1539
Column: 20
        if _ver_info == (2, 6):
            result = HTTP(h, timeout=self.timeout)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPConnection(h)
            result = self._connection[1]
        return result
Reported by Pylint.
Line: 1539
Column: 48
        if _ver_info == (2, 6):
            result = HTTP(h, timeout=self.timeout)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPConnection(h)
            result = self._connection[1]
        return result
Reported by Pylint.
Line: 1559
Column: 24
        if _ver_info == (2, 6):
            result = HTTPS(host, None, **kwargs)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None,
                                                                 **kwargs)
            result = self._connection[1]
        return result
Reported by Pylint.
Line: 1559
Column: 52
        if _ver_info == (2, 6):
            result = HTTPS(host, None, **kwargs)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None,
                                                                 **kwargs)
            result = self._connection[1]
        return result
Reported by Pylint.
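Note: the four findings above land in the transport's connection caching, where a single (host, connection) pair is kept and reused as long as requests keep targeting the same host. A stripped-down sketch of that pattern with a hypothetical class (not the distlib Transport):

import http.client

class CachingTransport:
    def __init__(self):
        self._connection = (None, None)  # (host, connection)

    def make_connection(self, host):
        if self._connection[0] != host:
            # Different host: open a fresh connection and remember it.
            self._connection = (host, http.client.HTTPConnection(host))
        return self._connection[1]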
Line: 1610
Column: 9
        return self
    def __exit__(self, *exc_info):
        self.stream.close()
class CSVReader(CSVBase):
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
Reported by Pylint.
Line: 353
Column: 13
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
Reported by Pylint.
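Note: one likely complaint here is the bare assert inside library code, which disappears under python -O. A hedged alternative that keeps the check active, using a hypothetical helper rather than distlib's own function:

def parse_export(spec, parser):
    # Explicit error instead of assert, so the check survives "python -O".
    entry = parser(spec)
    if entry is None:
        raise ValueError('invalid export entry: %r' % spec)
    return entry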
tests/integration/test_install_basic.py
253 issues
Line: 3
Column: 1
import os
import pytest
from flaky import flaky
from pipenv._compat import Path, TemporaryDirectory
from pipenv.utils import subprocess_run, temp_environ
Reported by Pylint.
Line: 5
Column: 1
import pytest
from flaky import flaky
from pipenv._compat import Path, TemporaryDirectory
from pipenv.utils import subprocess_run, temp_environ
Reported by Pylint.
Line: 1
Column: 1
import os
import pytest
from flaky import flaky
from pipenv._compat import Path, TemporaryDirectory
from pipenv.utils import subprocess_run, temp_environ
Reported by Pylint.
Line: 14
Column: 1
@pytest.mark.setup
@pytest.mark.basic
@pytest.mark.install
def test_basic_setup(PipenvInstance):
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
Reported by Pylint.
Line: 14
Column: 1
@pytest.mark.setup
@pytest.mark.basic
@pytest.mark.install
def test_basic_setup(PipenvInstance):
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
Reported by Pylint.
Line: 15
Column: 30
@pytest.mark.basic
@pytest.mark.install
def test_basic_setup(PipenvInstance):
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
            assert "requests" in p.pipfile["packages"]
Reported by Pylint.
Line: 16
Column: 47
@pytest.mark.install
def test_basic_setup(PipenvInstance):
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
            assert "requests" in p.pipfile["packages"]
            assert "requests" in p.lockfile["default"]
Reported by Pylint.
Line: 17
Column: 13
def test_basic_setup(PipenvInstance):
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
            assert "requests" in p.pipfile["packages"]
            assert "requests" in p.lockfile["default"]
            assert "chardet" in p.lockfile["default"]
Reported by Pylint.
Line: 18
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    with PipenvInstance() as p:
        with PipenvInstance(pipfile=False) as p:
            c = p.pipenv("install requests")
            assert c.returncode == 0
            assert "requests" in p.pipfile["packages"]
            assert "requests" in p.lockfile["default"]
            assert "chardet" in p.lockfile["default"]
            assert "idna" in p.lockfile["default"]
Reported by Bandit.
Line: 20
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
            c = p.pipenv("install requests")
            assert c.returncode == 0
            assert "requests" in p.pipfile["packages"]
            assert "requests" in p.lockfile["default"]
            assert "chardet" in p.lockfile["default"]
            assert "idna" in p.lockfile["default"]
            assert "urllib3" in p.lockfile["default"]
            assert "certifi" in p.lockfile["default"]
Reported by Bandit.
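Note: both Bandit findings are B101 (use of assert). In pytest-based tests plain asserts are the intended style, so B101 is usually suppressed for the test tree (for example with bandit -s B101 or a skip entry in the Bandit configuration) rather than rewritten. In runtime code an explicit error is preferred; a hypothetical sketch:

def require_success(result):
    # Explicit failure instead of assert, so it is not stripped by "python -O".
    if result.returncode != 0:
        raise RuntimeError("pipenv command failed with %d" % result.returncode)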
pipenv/vendor/distlib/_backport/tarfile.py
244 issues
Line: 71
Column: 5
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins
_open = builtins.open  # Since 'open' is TarFile.open
Reported by Pylint.
Line: 376
Column: 21
"w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
}[mode]
if hasattr(os, "O_BINARY"):
mode |= os.O_BINARY
self.fd = os.open(name, mode, 0o666)
def close(self):
os.close(self.fd)
Reported by Pylint.
Line: 629
Column: 5
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)
    def read(self, size):
        self.read = self.fileobj.read
        return self.buf
    def getcomptype(self):
        if self.buf.startswith(b"\037\213\010"):
Reported by Pylint.
Line: 30
Column: 1
#
from __future__ import print_function
"""Read from and write to tar format archives.
"""
__version__ = "$Revision$"
version = "0.9.0"
Reported by Pylint.
Line: 208
Column: 13
        try:
            n = int(nts(s, "ascii", "strict") or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
Reported by Pylint.
Line: 216
Column: 22
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
Reported by Pylint.
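Note: the comment in this snippet describes the POSIX number-field encoding: octal digits followed by a NUL byte, so an 8-byte field holds at most 8**7 - 1 = 2097151; larger values fall back to GNU tar's base-256 form marked by a leading 0o200 byte. A worked example using the standard library's tarfile module, which is assumed here to match the vendored backport's itn/nti pair:

import tarfile

field = tarfile.itn(1000, digits=8, format=tarfile.USTAR_FORMAT)
print(field)               # b'0001750\x00'  (1000 decimal == 0o1750)
print(tarfile.nti(field))  # 1000

# 8**7 no longer fits in 7 octal digits, so GNU format switches encodings.
big = tarfile.itn(8**7, digits=8, format=tarfile.GNU_FORMAT)
print(big[0] == 0o200)     # True: base-256 marker byte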
Line: 232
Column: 3
            raise ValueError("overflow in number field")
        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]
        s = bytearray()
        for i in range(digits - 1):
Reported by Pylint.
Line: 237
Column: 13
            n = struct.unpack("L", struct.pack("l", n))[0]
        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
    return s
Reported by Pylint.
Line: 272
Column: 9
    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in range(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)
Reported by Pylint.
Line: 329
Column: 5
class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
Reported by Pylint.
pipenv/patched/notpip/_vendor/distlib/_backport/tarfile.py
244 issues
Line: 71
Column: 5
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins
_open = builtins.open  # Since 'open' is TarFile.open
Reported by Pylint.
Line: 376
Column: 21
"w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
}[mode]
if hasattr(os, "O_BINARY"):
mode |= os.O_BINARY
self.fd = os.open(name, mode, 0o666)
def close(self):
os.close(self.fd)
Reported by Pylint.
Line: 629
Column: 5
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)
    def read(self, size):
        self.read = self.fileobj.read
        return self.buf
    def getcomptype(self):
        if self.buf.startswith(b"\037\213\010"):
Reported by Pylint.
Line: 30
Column: 1
#
from __future__ import print_function
"""Read from and write to tar format archives.
"""
__version__ = "$Revision$"
version = "0.9.0"
Reported by Pylint.
Line: 208
Column: 13
        try:
            n = int(nts(s, "ascii", "strict") or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
Reported by Pylint.
Line: 216
Column: 22
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
Reported by Pylint.
Line: 232
Column: 3
            raise ValueError("overflow in number field")
        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]
        s = bytearray()
        for i in range(digits - 1):
Reported by Pylint.
Line: 237
Column: 13
            n = struct.unpack("L", struct.pack("l", n))[0]
        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
    return s
Reported by Pylint.
Line: 272
Column: 9
    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in range(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)
Reported by Pylint.
Line: 329
Column: 5
class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
Reported by Pylint.
pipenv/patched/yaml3/parser.py
243 issues
Line: 64
Column: 1
__all__ = ['Parser', 'ParserError']
from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
Reported by Pylint.
Line: 65
Column: 1
__all__ = ['Parser', 'ParserError']
from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
pass
Reported by Pylint.
Line: 66
Column: 1
from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
pass
Reported by Pylint.
Line: 67
Column: 1
from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
pass
class Parser:
Reported by Pylint.
Line: 130
Column: 17
    def parse_stream_start(self):
        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(token.start_mark, token.end_mark,
                encoding=token.encoding)
        # Prepare the next state.
        self.state = self.parse_implicit_document_start
Reported by Pylint.
Line: 131
Column: 17
        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(token.start_mark, token.end_mark,
                encoding=token.encoding)
        # Prepare the next state.
        self.state = self.parse_implicit_document_start
Reported by Pylint.
Line: 142
Column: 16
    def parse_implicit_document_start(self):
        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
Reported by Pylint.
Line: 142
Column: 33
    def parse_implicit_document_start(self):
        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
Reported by Pylint.
Line: 142
Column: 49
    def parse_implicit_document_start(self):
        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
Reported by Pylint.
Line: 143
Column: 17
        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=False)
Reported by Pylint.
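Note: the flagged methods translate the token stream into parser events; a document with no leading '---' yields a DocumentStartEvent with explicit=False, exactly as the snippet constructs. This can be observed through the public API, assuming the vendored yaml3 package mirrors upstream PyYAML:

import yaml

for event in yaml.parse("a: 1"):
    print(type(event).__name__)
# StreamStartEvent, DocumentStartEvent (explicit=False), MappingStartEvent,
# ScalarEvent x2, MappingEndEvent, DocumentEndEvent, StreamEndEvent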
pipenv/vendor/dateutil/rrule.py
232 issues
Line: 22
Column: 1
from six.moves import _thread, range
from ._common import weekday as weekdaybase
try:
    from math import gcd
except ImportError:
    from fractions import gcd
Reported by Pylint.
Line: 109
Column: 20
        if self._cache_complete:
            return iter(self._cache)
        elif self._cache is None:
            return self._iter()
        else:
            return self._iter_cached()
    def _invalidate_cache(self):
        if self._cache is not None:
Reported by Pylint.
Line: 117
Column: 31
        if self._cache is not None:
            self._cache = []
            self._cache_complete = False
            self._cache_gen = self._iter()
            if self._cache_lock.locked():
                self._cache_lock.release()
        self._len = None
Reported by Pylint.
Line: 1126
Column: 20
    def rebuild(self, year, month):
        # Every mask is 7 days longer to handle cross-year weekly periods.
        rr = self.rrule
        if year != self.lastyear:
            self.yearlen = 365 + calendar.isleap(year)
            self.nextyearlen = 365 + calendar.isleap(year + 1)
            firstyday = datetime.date(year, 1, 1)
            self.yearordinal = firstyday.toordinal()
            self.yearweekday = firstyday.weekday()
Reported by Pylint.
Line: 1217
Column: 42
                    for i in range(no1wkst):
                        self.wnomask[i] = 1
        if (rr._bynweekday and (month != self.lastmonth or
                                year != self.lastyear)):
            ranges = []
            if rr._freq == YEARLY:
                if rr._bymonth:
                    for month in rr._bymonth:
Reported by Pylint.
Line: 1218
Column: 41
                        self.wnomask[i] = 1
        if (rr._bynweekday and (month != self.lastmonth or
                                year != self.lastyear)):
            ranges = []
            if rr._freq == YEARLY:
                if rr._bymonth:
                    for month in rr._bymonth:
                        ranges.append(self.mrange[month-1:month+1])
Reported by Pylint.
Line: 1580
Column: 21
                except KeyError:
                    continue
        if tzids is None:
            from . import tz
            tzlookup = tz.gettz
        elif callable(tzids):
            tzlookup = tzids
        else:
            tzlookup = getattr(tzids, 'get', None)
Reported by Pylint.
Line: 88
Column: 9
    @wraps(f)
    def inner_func(self, *args, **kwargs):
        rv = f(self, *args, **kwargs)
        self._invalidate_cache()
        return rv
    return inner_func
Reported by Pylint.
Line: 136
Column: 25
                if self._cache_complete:
                    break
                try:
                    for j in range(10):
                        cache.append(advance_iterator(gen))
                except StopIteration:
                    self._cache_gen = gen = None
                    self._cache_complete = True
                    break
Reported by Pylint.
Line: 139
Column: 21
                    for j in range(10):
                        cache.append(advance_iterator(gen))
                except StopIteration:
                    self._cache_gen = gen = None
                    self._cache_complete = True
                    break
                release()
            yield cache[i]
            i += 1
Reported by Pylint.
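Note: these findings sit in rrulebase's optional result cache: with cache=True the occurrences are generated once (pulled in batches of ten under a lock so concurrent iterators cooperate, per the snippet) and later iterations replay the stored list. Illustrative use of the public API, assuming the vendored dateutil matches upstream python-dateutil:

from datetime import datetime
from dateutil.rrule import rrule, DAILY

rule = rrule(DAILY, dtstart=datetime(2020, 1, 1), count=5, cache=True)
first = list(rule)      # fills the cache on first iteration
second = list(rule)     # served from the cache
print(first == second)  # True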
pipenv/vendor/cerberus/tests/test_validation.py
219 issues
Line: 10
Column: 1
from random import choice
from string import ascii_lowercase
from pytest import mark
from cerberus import errors, Validator
from cerberus.tests import (
    assert_bad_type,
    assert_document_error,
Reported by Pylint.
Line: 12
Column: 1
from pytest import mark
from cerberus import errors, Validator
from cerberus.tests import (
    assert_bad_type,
    assert_document_error,
    assert_fail,
    assert_has_error,
Reported by Pylint.
Line: 13
Column: 1
from pytest import mark
from cerberus import errors, Validator
from cerberus.tests import (
    assert_bad_type,
    assert_document_error,
    assert_fail,
    assert_has_error,
    assert_not_has_error,
Reported by Pylint.
Line: 21
Column: 1
    assert_not_has_error,
    assert_success,
)
from cerberus.tests.conftest import sample_schema
def test_empty_document():
    assert_document_error(None, sample_schema, None, errors.DOCUMENT_MISSING)
Reported by Pylint.
Line: 686
Column: 3
    assert len(v['a_dict_with_valuesrules']['valuesrules'].descendants) == 1
# TODO remove 'keyschema' as rule with the next major release
@mark.parametrize('rule', ('keysrules', 'keyschema'))
def test_keysrules(rule):
    schema = {
        'a_dict_with_keysrules': {
            'type': 'dict',
Reported by Pylint.
Line: 754
Column: 3
            if value < min_number:
                self._error(field, 'Below the min')
        # TODO replace with TypeDefintion in next major release
        def _validate_type_number(self, value):
            if isinstance(value, int):
                return True
    schema = {'test_field': {'min_number': 1, 'type': 'number'}}
Reported by Pylint.
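Note: this test exercises Cerberus's custom-rule hooks: a Validator subclass gains a rule by defining _validate_<rule>(self, constraint, field, value), and the method's docstring carries the schema for the rule's own argument. A minimal self-contained sketch of that mechanism, assuming upstream Cerberus semantics:

from cerberus import Validator

class MyValidator(Validator):
    def _validate_min_number(self, min_number, field, value):
        """{'type': 'number'}"""  # constraint schema for this rule
        if value < min_number:
            self._error(field, 'Below the min')

v = MyValidator({'age': {'min_number': 18, 'type': 'integer'}})
print(v.validate({'age': 21}))  # True
print(v.validate({'age': 7}))   # False
print(v.errors)                 # {'age': ['Below the min']}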
Line: 893
Column: 5
            return True
    validator = CustomValidator({})
    validator.allow_unknown = {"type": "foo"}
    assert_success(document={"fred": "foo", "barney": "foo"}, validator=validator)
def test_nested_unknown_keys():
    schema = {
Reported by Pylint.
Line: 1129
Column: 38
"""
class MyValidator(Validator):
def _validate_root_doc(self, root_doc, field, value):
"""{'type': 'boolean'}"""
if 'sub' not in self.root_document or len(self.root_document['sub']) != 2:
self._error(field, 'self.context is not the root doc!')
schema = {
Reported by Pylint.
Line: 1129
Column: 55
"""
class MyValidator(Validator):
def _validate_root_doc(self, root_doc, field, value):
"""{'type': 'boolean'}"""
if 'sub' not in self.root_document or len(self.root_document['sub']) != 2:
self._error(field, 'self.context is not the root doc!')
schema = {
Reported by Pylint.
Line: 1634
Column: 13
        assert_fail({'text': 'foo'}, schema)
    except TypeError as e:
        if str(e) == "argument of type 'bool' is not iterable":
            raise AssertionError(
                "Bug #138 still exists, couldn't use boolean in dependency "
                "without putting it in a list.\n"
                "'some_field': True vs 'some_field: [True]"
            )
        else:
Reported by Pylint.
pipenv/patched/notpip/_vendor/html5lib/_tokenizer.py
216 issues
Line: 7
Column: 1
from collections import deque
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
Reported by Pylint.
Line: 8
Column: 1
from collections import deque
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
Reported by Pylint.
Line: 9
Column: 1
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from ._inputstream import HTMLInputStream
Reported by Pylint.
Line: 10
Column: 1
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from ._inputstream import HTMLInputStream
Reported by Pylint.
Line: 11
Column: 1
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from ._inputstream import HTMLInputStream
from ._trie import Trie
Reported by Pylint.
Line: 12
Column: 1
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from ._inputstream import HTMLInputStream
from ._trie import Trie
Reported by Pylint.
Line: 14
Column: 1
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from ._inputstream import HTMLInputStream
from ._trie import Trie
entitiesTrie = Trie(entities)
Reported by Pylint.
Line: 16
Column: 1
from ._inputstream import HTMLInputStream
from ._trie import Trie
entitiesTrie = Trie(entities)
class HTMLTokenizer(object):
Reported by Pylint.
Line: 1709
Column: 1
            else:
                data.append(char)
        data = "".join(data)  # pylint:disable=redefined-variable-type
        # Deal with null here rather than in the parser
        nullCount = data.count("\u0000")
        if nullCount > 0:
            for _ in range(nullCount):
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
Reported by Pylint.
Line: 3
Column: 1
from __future__ import absolute_import, division, unicode_literals
from pipenv.patched.notpip._vendor.six import unichr as chr
from collections import deque
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
Reported by Pylint.
pipenv/patched/notpip/_vendor/html5lib/_inputstream.py
213 issues
Line: 4
Column: 1
from __future__ import absolute_import, division, unicode_literals
from pipenv.patched.notpip._vendor.six import text_type, binary_type
from pipenv.patched.notpip._vendor.six.moves import http_client, urllib
import codecs
import re
from pipenv.patched.notpip._vendor import webencodings
Reported by Pylint.
Line: 11
Column: 1
from pipenv.patched.notpip._vendor import webencodings
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import _ReparseException
from . import _utils
from io import StringIO
Reported by Pylint.
Line: 12
Column: 1
from pipenv.patched.notpip._vendor import webencodings
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import _ReparseException
from . import _utils
from io import StringIO
try:
Reported by Pylint.
Line: 13
Column: 1
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import _ReparseException
from . import _utils
from io import StringIO
try:
from io import BytesIO
Reported by Pylint.
Line: 38
Suggestion:
https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b307-eval
    # surrogates.
    assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
                                    eval('"\\uD800-\\uDFFF"') +  # pylint:disable=eval-used
                                    "]")
else:
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
Reported by Bandit.
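Note: B307 is reacting to the eval('"\\uD800-\\uDFFF"') above, which html5lib uses on purpose to keep a lone-surrogate literal out of the source (and already annotates with a pylint pragma). If the constant had to be built without eval, chr() gets there on Python 3; a sketch, not a proposed patch to the vendored file:

import re

# Build the surrogate character class at runtime instead of via eval.
surrogate_range = chr(0xD800) + "-" + chr(0xDFFF)
invalid_unicode = re.compile("[" + surrogate_range + "]")
print(bool(invalid_unicode.search("plain text")))  # False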
Line: 85
Column: 20
            i += 1
        self.position = [i, offset]
    def read(self, bytes):
        if not self.buffer:
            return self._readStream(bytes)
        elif (self.position[0] == len(self.buffer) and
              self.position[1] == len(self.buffer[-1])):
            return self._readStream(bytes)
Reported by Pylint.
Line: 97
Column: 27
    def _bufferedBytes(self):
        return sum([len(item) for item in self.buffer])
    def _readStream(self, bytes):
        data = self.stream.read(bytes)
        self.buffer.append(data)
        self.position[0] += 1
        self.position[1] = len(data)
        return data
Reported by Pylint.
Line: 104
Column: 31
        self.position[1] = len(data)
        return data
    def _readFromBuffer(self, bytes):
        remainingBytes = bytes
        rv = []
        bufferIndex = self.position[0]
        bufferOffset = self.position[1]
        while bufferIndex < len(self.buffer) and remainingBytes != 0:
Reported by Pylint.
Line: 251
Column: 9
        chunkOffset = self.chunkOffset
        char = self.chunk[chunkOffset]
        self.chunkOffset = chunkOffset + 1
        return char
    def readChunk(self, chunkSize=None):
        if chunkSize is None:
Reported by Pylint.
Line: 259
Column: 28
        if chunkSize is None:
            chunkSize = self._defaultChunkSize
        self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
        self.chunk = ""
        self.chunkSize = 0
        self.chunkOffset = 0
Reported by Pylint.
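Note: the remaining findings cluster in the input stream's chunk bookkeeping: characters come from self.chunk at self.chunkOffset, and readChunk() refills the buffer and resets the offset when it runs out. A stripped-down sketch of that pattern with a hypothetical class (not the html5lib implementation):

from io import StringIO

class ChunkedReader:
    def __init__(self, stream, chunk_size=4):
        self.stream = stream
        self.chunk = ""
        self.chunk_size = 0
        self.chunk_offset = 0
        self._default_chunk_size = chunk_size

    def char(self):
        # Refill the chunk when the offset reaches its end.
        if self.chunk_offset >= self.chunk_size:
            if not self.read_chunk():
                return None  # EOF
        ch = self.chunk[self.chunk_offset]
        self.chunk_offset += 1
        return ch

    def read_chunk(self):
        self.chunk = self.stream.read(self._default_chunk_size)
        self.chunk_size = len(self.chunk)
        self.chunk_offset = 0
        return self.chunk_size > 0

reader = ChunkedReader(StringIO("abcdef"))
print("".join(iter(reader.char, None)))  # abcdef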