The following issues were found:
pandas/tests/indexes/timedeltas/test_formats.py
13 issues
Line: 1
Column: 1
import pytest
import pandas as pd
from pandas import (
Series,
TimedeltaIndex,
)
Reported by Pylint.
Line: 92
Column: 22
for idx, expected in zip(
[idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]
):
result = idx._summary()
assert result == expected
Reported by Pylint.
Line: 1
Column: 1
import pytest
import pandas as pd
from pandas import (
Series,
TimedeltaIndex,
)
Reported by Pylint.
Line: 10
Column: 1
)
class TestTimedeltaIndexRendering:
@pytest.mark.parametrize("method", ["__repr__", "__str__"])
def test_representation(self, method):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
Reported by Pylint.
Line: 12
Column: 5
class TestTimedeltaIndexRendering:
@pytest.mark.parametrize("method", ["__repr__", "__str__"])
def test_representation(self, method):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
idx4 = TimedeltaIndex(["1 days", "2 days", "3 days"], freq="D")
idx5 = TimedeltaIndex(["1 days 00:00:01", "2 days", "3 days"])
Reported by Pylint.
Line: 12
Column: 5
class TestTimedeltaIndexRendering:
@pytest.mark.parametrize("method", ["__repr__", "__str__"])
def test_representation(self, method):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
idx4 = TimedeltaIndex(["1 days", "2 days", "3 days"], freq="D")
idx5 = TimedeltaIndex(["1 days 00:00:01", "2 days", "3 days"])
Reported by Pylint.
Line: 40
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
[idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]
):
result = getattr(idx, method)()
assert result == expected
def test_representation_to_series(self):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
Reported by Bandit.
Line: 42
Column: 5
result = getattr(idx, method)()
assert result == expected
def test_representation_to_series(self):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
idx4 = TimedeltaIndex(["1 days", "2 days", "3 days"], freq="D")
idx5 = TimedeltaIndex(["1 days 00:00:01", "2 days", "3 days"])
Reported by Pylint.
Line: 42
Column: 5
result = getattr(idx, method)()
assert result == expected
def test_representation_to_series(self):
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
idx3 = TimedeltaIndex(["1 days", "2 days"], freq="D")
idx4 = TimedeltaIndex(["1 days", "2 days", "3 days"], freq="D")
idx5 = TimedeltaIndex(["1 days 00:00:01", "2 days", "3 days"])
Reported by Pylint.
Line: 69
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
[idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]
):
result = repr(Series(idx))
assert result == expected
def test_summary(self):
# GH#9116
idx1 = TimedeltaIndex([], freq="D")
idx2 = TimedeltaIndex(["1 days"], freq="D")
Reported by Bandit.
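Note: the Bandit entries above all point at rule B101 (assert_used, see the suggestion URL). In pytest-based suites, bare assert statements are the intended comparison idiom, so these findings are usually suppressed rather than rewritten, either by skipping B101 when running Bandit (for example with its --skip option) or by marking individual lines. A minimal sketch of the per-line marker, assuming a plain pytest-style test; the test body is illustrative and not taken from the report:

def test_example():
    # Ordinary pytest comparison; the trailing "# nosec" marker tells Bandit
    # to ignore this line, which silences the B101 (assert_used) finding.
    result = 1 + 1
    assert result == 2  # nosec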
pandas/tests/test_optional_dependency.py
13 issues
Line: 4
Column: 1
import sys
import types
import pytest
from pandas.compat._optional import (
VERSIONS,
import_optional_dependency,
)
Reported by Pylint.
Line: 1
Column: 1
import sys
import types
import pytest
from pandas.compat._optional import (
VERSIONS,
import_optional_dependency,
)
Reported by Pylint.
Line: 14
Column: 1
import pandas._testing as tm
def test_import_optional():
match = "Missing .*notapackage.* pip .* conda .* notapackage"
with pytest.raises(ImportError, match=match):
import_optional_dependency("notapackage")
result = import_optional_dependency("notapackage", errors="ignore")
Reported by Pylint.
Line: 20
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
import_optional_dependency("notapackage")
result = import_optional_dependency("notapackage", errors="ignore")
assert result is None
def test_xlrd_version_fallback():
pytest.importorskip("xlrd")
import_optional_dependency("xlrd")
Reported by Bandit.
Line: 23
Column: 1
assert result is None
def test_xlrd_version_fallback():
pytest.importorskip("xlrd")
import_optional_dependency("xlrd")
def test_bad_version(monkeypatch):
Reported by Pylint.
Line: 28
Column: 1
import_optional_dependency("xlrd")
def test_bad_version(monkeypatch):
name = "fakemodule"
module = types.ModuleType(name)
module.__version__ = "0.9.0"
sys.modules[name] = module
monkeypatch.setitem(VERSIONS, name, "1.0.0")
Reported by Pylint.
Line: 41
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# Test min_version parameter
result = import_optional_dependency("fakemodule", min_version="0.8")
assert result is module
with tm.assert_produces_warning(UserWarning):
result = import_optional_dependency("fakemodule", errors="warn")
assert result is None
Reported by Bandit.
Line: 45
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
with tm.assert_produces_warning(UserWarning):
result = import_optional_dependency("fakemodule", errors="warn")
assert result is None
module.__version__ = "1.0.0" # exact match is OK
result = import_optional_dependency("fakemodule")
assert result is module
Reported by Bandit.
Line: 49
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
module.__version__ = "1.0.0" # exact match is OK
result = import_optional_dependency("fakemodule")
assert result is module
def test_submodule(monkeypatch):
# Create a fake module with a submodule
name = "fakemodule"
Reported by Bandit.
Line: 52
Column: 1
assert result is module
def test_submodule(monkeypatch):
# Create a fake module with a submodule
name = "fakemodule"
module = types.ModuleType(name)
module.__version__ = "0.9.0"
sys.modules[name] = module
Reported by Pylint.
pandas/tests/io/formats/test_console.py
13 issues
Line: 3
Column: 1
import locale
import pytest
from pandas._config import detect_console_encoding
class MockEncoding: # TODO(py27): replace with mock
"""
Reported by Pylint.
Line: 8
Column: 3
from pandas._config import detect_console_encoding
class MockEncoding: # TODO(py27): replace with mock
"""
Used to add a side effect when accessing the 'encoding' property. If the
side effect is a str in nature, the value will be returned. Otherwise, the
side effect should be an exception that will be raised.
"""
Reported by Pylint.
Line: 62
Column: 72
[IOError, locale.Error],
],
)
def test_detect_console_encoding_fallback_to_default(monkeypatch, std, locale):
# When both the stdout/stdin encoding and locale preferred encoding checks
# fail (or return 'ascii', we should default to the sys default encoding.
# GH 21552
with monkeypatch.context() as context:
context.setattr(
Reported by Pylint.
Line: 1
Column: 1
import locale
import pytest
from pandas._config import detect_console_encoding
class MockEncoding: # TODO(py27): replace with mock
"""
Reported by Pylint.
Line: 20
Column: 5
self.val = encoding
@property
def encoding(self):
return self.raise_or_return(self.val)
@staticmethod
def raise_or_return(val):
if isinstance(val, str):
Reported by Pylint.
Line: 24
Column: 5
return self.raise_or_return(self.val)
@staticmethod
def raise_or_return(val):
if isinstance(val, str):
return val
else:
raise val
Reported by Pylint.
Line: 25
Column: 9
@staticmethod
def raise_or_return(val):
if isinstance(val, str):
return val
else:
raise val
Reported by Pylint.
Line: 32
Column: 1
@pytest.mark.parametrize("empty,filled", [["stdin", "stdout"], ["stdout", "stdin"]])
def test_detect_console_encoding_from_stdout_stdin(monkeypatch, empty, filled):
# Ensures that when sys.stdout.encoding or sys.stdin.encoding is used when
# they have values filled.
# GH 21552
with monkeypatch.context() as context:
context.setattr(f"sys.{empty}", MockEncoding(""))
Reported by Pylint.
Line: 39
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
with monkeypatch.context() as context:
context.setattr(f"sys.{empty}", MockEncoding(""))
context.setattr(f"sys.{filled}", MockEncoding(filled))
assert detect_console_encoding() == filled
@pytest.mark.parametrize("encoding", [AttributeError, IOError, "ascii"])
def test_detect_console_encoding_fallback_to_locale(monkeypatch, encoding):
# GH 21552
Reported by Bandit.
Line: 43
Column: 1
@pytest.mark.parametrize("encoding", [AttributeError, IOError, "ascii"])
def test_detect_console_encoding_fallback_to_locale(monkeypatch, encoding):
# GH 21552
with monkeypatch.context() as context:
context.setattr("locale.getpreferredencoding", lambda: "foo")
context.setattr("sys.stdout", MockEncoding(encoding))
assert detect_console_encoding() == "foo"
Reported by Pylint.
scripts/tests/test_sync_flake8_versions.py
13 issues
Line: 1
Column: 1
import pytest
from ..sync_flake8_versions import get_revisions
def test_wrong_yesqa_flake8(capsys):
precommit_config = {
"repos": [
{
Reported by Pylint.
Line: 3
Column: 1
import pytest
from ..sync_flake8_versions import get_revisions
def test_wrong_yesqa_flake8(capsys):
precommit_config = {
"repos": [
{
Reported by Pylint.
Line: 176
Column: 35
assert result == expected
def test_get_revisions_no_failure(capsys):
precommit_config = {
"repos": [
{
"repo": "https://gitlab.com/pycqa/flake8",
"rev": "0.1.1",
Reported by Pylint.
Line: 1
Column: 1
import pytest
from ..sync_flake8_versions import get_revisions
def test_wrong_yesqa_flake8(capsys):
precommit_config = {
"repos": [
{
Reported by Pylint.
Line: 6
Column: 1
from ..sync_flake8_versions import get_revisions
def test_wrong_yesqa_flake8(capsys):
precommit_config = {
"repos": [
{
"repo": "https://gitlab.com/pycqa/flake8",
"rev": "0.1.1",
Reported by Pylint.
Line: 41
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
get_revisions(precommit_config, environment)
result, _ = capsys.readouterr()
expected = "flake8 in 'yesqa' does not match in 'flake8' from 'pre-commit'\n"
assert result == expected
def test_wrong_env_flake8(capsys):
precommit_config = {
"repos": [
Reported by Bandit.
Line: 44
Column: 1
assert result == expected
def test_wrong_env_flake8(capsys):
precommit_config = {
"repos": [
{
"repo": "https://gitlab.com/pycqa/flake8",
"rev": "0.1.1",
Reported by Pylint.
Line: 81
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
expected = (
"flake8 in 'environment.yml' does not match in 'flake8' from 'pre-commit'\n"
)
assert result == expected
def test_wrong_yesqa_add_dep(capsys):
precommit_config = {
"repos": [
Reported by Bandit.
Line: 84
Column: 1
assert result == expected
def test_wrong_yesqa_add_dep(capsys):
precommit_config = {
"repos": [
{
"repo": "https://gitlab.com/pycqa/flake8",
"rev": "0.1.1",
Reported by Pylint.
Line: 127
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"Mismatch of 'flake8-bugs' version between 'flake8' and 'yesqa' in "
"'.pre-commit-config.yaml'\n"
)
assert result == expected
def test_wrong_env_add_dep(capsys):
precommit_config = {
"repos": [
Reported by Bandit.
web/pandas_web.py
13 issues
Line: 280
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b701_jinja2_autoescape_false.html
sys.stderr.write("Context generated\n")
templates_path = os.path.join(source_path, context["main"]["templates_path"])
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))
for fname in get_source_files(source_path):
if os.path.normpath(fname) in context["main"]["ignore"]:
continue
Reported by Bandit.
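Note: this finding is B701 (jinja2_autoescape_false): the jinja2.Environment above is created without autoescaping, so rendered templates do not escape HTML by default. A minimal sketch of the usual remediation, assuming the same FileSystemLoader setup as in the excerpt; the templates_path value here is illustrative:

import jinja2

# Enabling autoescape (via select_autoescape) addresses B701 by escaping
# HTML/XML content when templates are rendered.
templates_path = "templates"
jinja_env = jinja2.Environment(
    loader=jinja2.FileSystemLoader(templates_path),
    autoescape=jinja2.select_autoescape(),
)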
Line: 37
Column: 1
import time
import typing
import feedparser
import jinja2
import markdown
import requests
import yaml
Reported by Pylint.
Line: 39
Column: 1
import feedparser
import jinja2
import markdown
import requests
import yaml
class Preprocessors:
Reported by Pylint.
Line: 102
Column: 21
try:
body_position = summary.index(title) + len(title)
except ValueError:
raise ValueError(
f'Blog post "{fname}" should have a markdown header '
f'corresponding to its "Title" element "{title}"'
)
summary = " ".join(summary[body_position:].split(" ")[:30])
posts.append(
Reported by Pylint.
Line: 243
Column: 15
"""
Generate the list of files present in the source directory.
"""
for root, dirs, fnames in os.walk(source_path):
root = os.path.relpath(root, source_path)
for fname in fnames:
yield os.path.join(root, fname)
Reported by Pylint.
Line: 72
Column: 5
return context
@staticmethod
def blog_add_posts(context):
"""
Given the blog feed defined in the configuration yaml, this context
preprocessor fetches the posts in the feeds, and returns the relevant
information for them (sorted from newest to oldest).
"""
Reported by Pylint.
Line: 92
Column: 17
f"/{context['blog']['posts_path']}"
f"/{os.path.splitext(fname)[0]}.html"
)
md = markdown.Markdown(
extensions=context["main"]["markdown_extensions"]
)
with open(os.path.join(posts_path, fname)) as f:
html = md.convert(f.read())
title = md.Meta["title"][0]
Reported by Pylint.
Line: 95
Column: 63
md = markdown.Markdown(
extensions=context["main"]["markdown_extensions"]
)
with open(os.path.join(posts_path, fname)) as f:
html = md.convert(f.read())
title = md.Meta["title"][0]
summary = re.sub(tag_expr, "", html)
try:
body_position = summary.index(title) + len(title)
Reported by Pylint.
Line: 159
Column: 5
return context
@staticmethod
def home_add_releases(context):
context["releases"] = []
github_repo_url = context["main"]["github_repo_url"]
resp = requests.get(f"https://api.github.com/repos/{github_repo_url}/releases")
if context["ignore_io_errors"] and resp.status_code == 403:
Reported by Pylint.
Line: 220
Column: 32
Load the config yaml as the base context, and enrich it with the
information added by the context preprocessors defined in the file.
"""
with open(config_fname) as f:
context = yaml.safe_load(f)
context["source_path"] = os.path.dirname(config_fname)
context["ignore_io_errors"] = ignore_io_errors
context.update(kwargs)
Reported by Pylint.
pandas/tests/tslibs/test_parse_iso8601.py
13 issues
Line: 3
Column: 1
from datetime import datetime
import pytest
from pandas._libs import tslib
@pytest.mark.parametrize(
"date_str, exp",
Reported by Pylint.
Line: 5
Column: 1
import pytest
from pandas._libs import tslib
@pytest.mark.parametrize(
"date_str, exp",
[
Reported by Pylint.
Line: 28
Column: 14
#
# Test only the ISO parser - flexibility to
# different separators and leading zero's.
actual = tslib._test_parse_iso8601(date_str)
assert actual == exp
@pytest.mark.parametrize(
"date_str",
Reported by Pylint.
Line: 57
Column: 9
msg = f'Error parsing datetime string "{date_str}"'
with pytest.raises(ValueError, match=msg):
tslib._test_parse_iso8601(date_str)
def test_parsers_iso8601_invalid_offset_invalid():
date_str = "2001-01-01 12-34-56"
msg = f'Timezone hours offset out of range in datetime string "{date_str}"'
Reported by Pylint.
Line: 65
Column: 9
msg = f'Timezone hours offset out of range in datetime string "{date_str}"'
with pytest.raises(ValueError, match=msg):
tslib._test_parse_iso8601(date_str)
def test_parsers_iso8601_leading_space():
# GH#25895 make sure isoparser doesn't overflow with long input
date_str, expected = ("2013-1-1 5:30:00", datetime(2013, 1, 1, 5, 30))
Reported by Pylint.
Line: 71
Column: 14
def test_parsers_iso8601_leading_space():
# GH#25895 make sure isoparser doesn't overflow with long input
date_str, expected = ("2013-1-1 5:30:00", datetime(2013, 1, 1, 5, 30))
actual = tslib._test_parse_iso8601(" " * 200 + date_str)
assert actual == expected
Reported by Pylint.
Line: 1
Column: 1
from datetime import datetime
import pytest
from pandas._libs import tslib
@pytest.mark.parametrize(
"date_str, exp",
Reported by Pylint.
Line: 21
Column: 1
("2011\\01\\02", datetime(2011, 1, 2)),
("2013-01-01 05:30:00", datetime(2013, 1, 1, 5, 30)),
("2013-1-1 5:30:00", datetime(2013, 1, 1, 5, 30)),
],
)
def test_parsers_iso8601(date_str, exp):
# see gh-12060
#
# Test only the ISO parser - flexibility to
Reported by Pylint.
Line: 29
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# Test only the ISO parser - flexibility to
# different separators and leading zero's.
actual = tslib._test_parse_iso8601(date_str)
assert actual == exp
@pytest.mark.parametrize(
"date_str",
[
Reported by Bandit.
Line: 51
Column: 1
"20010101 123",
"20010101 12345",
"20010101 12345Z",
],
)
def test_parsers_iso8601_invalid(date_str):
msg = f'Error parsing datetime string "{date_str}"'
with pytest.raises(ValueError, match=msg):
Reported by Pylint.
pandas/tests/indexes/multi/conftest.py
13 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import (
Index,
MultiIndex,
)
Reported by Pylint.
Line: 21
Column: 5
major_codes = np.array([0, 0, 1, 2, 3, 3])
minor_codes = np.array([0, 1, 0, 1, 0, 1])
index_names = ["first", "second"]
mi = MultiIndex(
levels=[major_axis, minor_axis],
codes=[major_codes, minor_codes],
names=index_names,
verify_integrity=False,
Reported by Pylint.
Line: 39
Column: 5
major_codes = np.array([0, 0, 1, 0, 1, 1])
minor_codes = np.array([0, 1, 0, 1, 0, 1])
index_names = ["first", "second"]
mi = MultiIndex(
levels=[major_axis, minor_axis],
codes=[major_codes, minor_codes],
names=index_names,
verify_integrity=False,
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import (
Index,
MultiIndex,
)
Reported by Pylint.
Line: 13
Column: 1
# Note: identical the the "multi" entry in the top-level "index" fixture
@pytest.fixture
def idx():
# a MultiIndex used to test the general functionality of the
# general functionality of this object
major_axis = Index(["foo", "bar", "baz", "qux"])
minor_axis = Index(["one", "two"])
Reported by Pylint.
Line: 22
Column: 5
major_codes = np.array([0, 0, 1, 2, 3, 3])
minor_codes = np.array([0, 1, 0, 1, 0, 1])
index_names = ["first", "second"]
mi = MultiIndex(
levels=[major_axis, minor_axis],
codes=[major_codes, minor_codes],
names=index_names,
verify_integrity=False,
)
Reported by Pylint.
Line: 32
Column: 1
@pytest.fixture
def idx_dup():
# compare tests/indexes/multi/conftest.py
major_axis = Index(["foo", "bar", "baz", "qux"])
minor_axis = Index(["one", "two"])
major_codes = np.array([0, 0, 1, 0, 1, 1])
Reported by Pylint.
Line: 40
Column: 5
major_codes = np.array([0, 0, 1, 0, 1, 1])
minor_codes = np.array([0, 1, 0, 1, 0, 1])
index_names = ["first", "second"]
mi = MultiIndex(
levels=[major_axis, minor_axis],
codes=[major_codes, minor_codes],
names=index_names,
verify_integrity=False,
)
Reported by Pylint.
Line: 50
Column: 1
@pytest.fixture
def index_names():
# names that match those in the idx fixture for testing equality of
# names assigned to the idx
return ["first", "second"]
Reported by Pylint.
Line: 61
Column: 5
"""
Return a MultiIndex that is narrower than the display (<80 characters).
"""
n = 1000
ci = pd.CategoricalIndex(list("a" * n) + (["abc"] * n))
dti = pd.date_range("2000-01-01", freq="s", periods=n * 2)
return MultiIndex.from_arrays([ci, ci.codes + 9, dti], names=["a", "b", "dti"])
Reported by Pylint.
pandas/tests/indexes/period/test_tools.py
13 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
Period,
PeriodIndex,
period_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
Period,
PeriodIndex,
period_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 17
Column: 5
Wish to match NumPy units
"""
def _check_freq(self, freq, base_date):
rng = period_range(start=base_date, periods=10, freq=freq)
exp = np.arange(10, dtype=np.int64)
tm.assert_numpy_array_equal(rng.asi8, exp)
Reported by Pylint.
Line: 23
Column: 5
tm.assert_numpy_array_equal(rng.asi8, exp)
def test_annual(self):
self._check_freq("A", 1970)
def test_monthly(self):
self._check_freq("M", "1970-01")
Reported by Pylint.
Line: 26
Column: 5
def test_annual(self):
self._check_freq("A", 1970)
def test_monthly(self):
self._check_freq("M", "1970-01")
@pytest.mark.parametrize("freq", ["W-THU", "D", "B", "H", "T", "S", "L", "U", "N"])
def test_freq(self, freq):
self._check_freq(freq, "1970-01-01")
Reported by Pylint.
Line: 30
Column: 5
self._check_freq("M", "1970-01")
@pytest.mark.parametrize("freq", ["W-THU", "D", "B", "H", "T", "S", "L", "U", "N"])
def test_freq(self, freq):
self._check_freq(freq, "1970-01-01")
class TestPeriodIndexConversion:
def test_tolist(self):
Reported by Pylint.
Line: 34
Column: 1
self._check_freq(freq, "1970-01-01")
class TestPeriodIndexConversion:
def test_tolist(self):
index = period_range(freq="A", start="1/1/2001", end="12/1/2009")
rs = index.tolist()
for x in rs:
assert isinstance(x, Period)
Reported by Pylint.
Line: 34
Column: 1
self._check_freq(freq, "1970-01-01")
class TestPeriodIndexConversion:
def test_tolist(self):
index = period_range(freq="A", start="1/1/2001", end="12/1/2009")
rs = index.tolist()
for x in rs:
assert isinstance(x, Period)
Reported by Pylint.
Line: 35
Column: 5
class TestPeriodIndexConversion:
def test_tolist(self):
index = period_range(freq="A", start="1/1/2001", end="12/1/2009")
rs = index.tolist()
for x in rs:
assert isinstance(x, Period)
Reported by Pylint.
Line: 35
Column: 5
class TestPeriodIndexConversion:
def test_tolist(self):
index = period_range(freq="A", start="1/1/2001", end="12/1/2009")
rs = index.tolist()
for x in rs:
assert isinstance(x, Period)
Reported by Pylint.
pandas/tests/series/methods/test_truncate.py
13 issues
Line: 1
Column: 1
from datetime import datetime
import pandas as pd
from pandas import (
Series,
date_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 11
Column: 1
import pandas._testing as tm
class TestTruncate:
def test_truncate_datetimeindex_tz(self):
# GH 9243
idx = date_range("4/1/2005", "4/30/2005", freq="D", tz="US/Pacific")
s = Series(range(len(idx)), index=idx)
with tm.assert_produces_warning(FutureWarning):
Reported by Pylint.
Line: 12
Column: 5
class TestTruncate:
def test_truncate_datetimeindex_tz(self):
# GH 9243
idx = date_range("4/1/2005", "4/30/2005", freq="D", tz="US/Pacific")
s = Series(range(len(idx)), index=idx)
with tm.assert_produces_warning(FutureWarning):
# GH#36148 in the future will require tzawareness compat
Reported by Pylint.
Line: 12
Column: 5
class TestTruncate:
def test_truncate_datetimeindex_tz(self):
# GH 9243
idx = date_range("4/1/2005", "4/30/2005", freq="D", tz="US/Pacific")
s = Series(range(len(idx)), index=idx)
with tm.assert_produces_warning(FutureWarning):
# GH#36148 in the future will require tzawareness compat
Reported by Pylint.
Line: 15
Column: 9
def test_truncate_datetimeindex_tz(self):
# GH 9243
idx = date_range("4/1/2005", "4/30/2005", freq="D", tz="US/Pacific")
s = Series(range(len(idx)), index=idx)
with tm.assert_produces_warning(FutureWarning):
# GH#36148 in the future will require tzawareness compat
s.truncate(datetime(2005, 4, 2), datetime(2005, 4, 4))
lb = idx[1]
Reported by Pylint.
Line: 20
Column: 9
# GH#36148 in the future will require tzawareness compat
s.truncate(datetime(2005, 4, 2), datetime(2005, 4, 4))
lb = idx[1]
ub = idx[3]
result = s.truncate(lb.to_pydatetime(), ub.to_pydatetime())
expected = Series([1, 2, 3], index=idx[1:4])
tm.assert_series_equal(result, expected)
Reported by Pylint.
Line: 21
Column: 9
s.truncate(datetime(2005, 4, 2), datetime(2005, 4, 4))
lb = idx[1]
ub = idx[3]
result = s.truncate(lb.to_pydatetime(), ub.to_pydatetime())
expected = Series([1, 2, 3], index=idx[1:4])
tm.assert_series_equal(result, expected)
def test_truncate_periodindex(self):
Reported by Pylint.
Line: 26
Column: 5
expected = Series([1, 2, 3], index=idx[1:4])
tm.assert_series_equal(result, expected)
def test_truncate_periodindex(self):
# GH 17717
idx1 = pd.PeriodIndex(
[pd.Period("2017-09-02"), pd.Period("2017-09-02"), pd.Period("2017-09-03")]
)
series1 = Series([1, 2, 3], index=idx1)
Reported by Pylint.
Line: 26
Column: 5
expected = Series([1, 2, 3], index=idx[1:4])
tm.assert_series_equal(result, expected)
def test_truncate_periodindex(self):
# GH 17717
idx1 = pd.PeriodIndex(
[pd.Period("2017-09-02"), pd.Period("2017-09-02"), pd.Period("2017-09-03")]
)
series1 = Series([1, 2, 3], index=idx1)
Reported by Pylint.
Line: 48
Column: 5
expected_idx2 = pd.PeriodIndex([pd.Period("2017-09-02")])
tm.assert_series_equal(result2, Series([2], index=expected_idx2))
def test_truncate_one_element_series(self):
# GH 35544
series = Series([0.1], index=pd.DatetimeIndex(["2020-08-04"]))
before = pd.Timestamp("2020-08-02")
after = pd.Timestamp("2020-08-04")
Reported by Pylint.
pandas/tests/series/methods/test_convert_dtypes.py
13 issues
Line: 4
Column: 1
from itertools import product
import numpy as np
import pytest
from pandas.core.dtypes.common import is_interval_dtype
import pandas as pd
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
from itertools import product
import numpy as np
import pytest
from pandas.core.dtypes.common import is_interval_dtype
import pandas as pd
import pandas._testing as tm
Reported by Pylint.
Line: 151
Column: 1
]
class TestSeriesConvertDtypes:
@pytest.mark.parametrize(
"data, maindtype, expected_default, expected_other",
test_cases,
)
@pytest.mark.parametrize("params", product(*[(True, False)] * 5))
Reported by Pylint.
Line: 156
Column: 5
"data, maindtype, expected_default, expected_other",
test_cases,
)
@pytest.mark.parametrize("params", product(*[(True, False)] * 5))
def test_convert_dtypes(
self, data, maindtype, params, expected_default, expected_other
):
warn = None
if (
Reported by Pylint.
Line: 156
Column: 5
"data, maindtype, expected_default, expected_other",
test_cases,
)
@pytest.mark.parametrize("params", product(*[(True, False)] * 5))
def test_convert_dtypes(
self, data, maindtype, params, expected_default, expected_other
):
warn = None
if (
Reported by Pylint.
Line: 156
Column: 5
"data, maindtype, expected_default, expected_other",
test_cases,
)
@pytest.mark.parametrize("params", product(*[(True, False)] * 5))
def test_convert_dtypes(
self, data, maindtype, params, expected_default, expected_other
):
warn = None
if (
Reported by Pylint.
Line: 156
Column: 5
"data, maindtype, expected_default, expected_other",
test_cases,
)
@pytest.mark.parametrize("params", product(*[(True, False)] * 5))
def test_convert_dtypes(
self, data, maindtype, params, expected_default, expected_other
):
warn = None
if (
Reported by Pylint.
Line: 216
Column: 5
# Make sure original not changed
tm.assert_series_equal(series, copy)
def test_convert_string_dtype(self, nullable_string_dtype):
# https://github.com/pandas-dev/pandas/issues/31731 -> converting columns
# that are already string dtype
df = pd.DataFrame(
{"A": ["a", "b", pd.NA], "B": ["ä", "ö", "ü"]}, dtype=nullable_string_dtype
)
Reported by Pylint.
Line: 216
Column: 5
# Make sure original not changed
tm.assert_series_equal(series, copy)
def test_convert_string_dtype(self, nullable_string_dtype):
# https://github.com/pandas-dev/pandas/issues/31731 -> converting columns
# that are already string dtype
df = pd.DataFrame(
{"A": ["a", "b", pd.NA], "B": ["ä", "ö", "ü"]}, dtype=nullable_string_dtype
)
Reported by Pylint.
Line: 219
Column: 9
def test_convert_string_dtype(self, nullable_string_dtype):
# https://github.com/pandas-dev/pandas/issues/31731 -> converting columns
# that are already string dtype
df = pd.DataFrame(
{"A": ["a", "b", pd.NA], "B": ["ä", "ö", "ü"]}, dtype=nullable_string_dtype
)
result = df.convert_dtypes()
tm.assert_frame_equal(df, result)
Reported by Pylint.