The following issues were found:
pandas/tests/frame/methods/test_dtypes.py
19 issues
Line: 1
Column: 1
from datetime import timedelta
import numpy as np
from pandas.core.dtypes.dtypes import DatetimeTZDtype
import pandas as pd
from pandas import (
DataFrame,
Reported by Pylint.
Line: 17
Column: 1
import pandas._testing as tm
def _check_cast(df, v):
"""
Check if all dtypes of df are equal to v
"""
assert all(s.dtype.name == v for _, s in df.items())
Reported by Pylint.
Line: 17
Column: 1
import pandas._testing as tm
def _check_cast(df, v):
"""
Check if all dtypes of df are equal to v
"""
assert all(s.dtype.name == v for _, s in df.items())
Reported by Pylint.
Line: 21
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"""
Check if all dtypes of df are equal to v
"""
assert all(s.dtype.name == v for _, s in df.items())
class TestDataFrameDataTypes:
def test_empty_frame_dtypes(self):
empty_df = DataFrame()
Reported by Bandit.
Line: 24
Column: 1
assert all(s.dtype.name == v for _, s in df.items())
class TestDataFrameDataTypes:
def test_empty_frame_dtypes(self):
empty_df = DataFrame()
tm.assert_series_equal(empty_df.dtypes, Series(dtype=object))
nocols_df = DataFrame(index=[1, 2, 3])
Reported by Pylint.
Line: 25
Column: 5
class TestDataFrameDataTypes:
def test_empty_frame_dtypes(self):
empty_df = DataFrame()
tm.assert_series_equal(empty_df.dtypes, Series(dtype=object))
nocols_df = DataFrame(index=[1, 2, 3])
tm.assert_series_equal(nocols_df.dtypes, Series(dtype=object))
Reported by Pylint.
Line: 25
Column: 5
class TestDataFrameDataTypes:
def test_empty_frame_dtypes(self):
empty_df = DataFrame()
tm.assert_series_equal(empty_df.dtypes, Series(dtype=object))
nocols_df = DataFrame(index=[1, 2, 3])
tm.assert_series_equal(nocols_df.dtypes, Series(dtype=object))
Reported by Pylint.
Line: 40
Column: 9
norows_int_df.dtypes, Series(np.dtype("int32"), index=list("abc"))
)
df = DataFrame({"a": 1, "b": True, "c": 1.0}, index=[1, 2, 3])
ex_dtypes = Series({"a": np.int64, "b": np.bool_, "c": np.float64})
tm.assert_series_equal(df.dtypes, ex_dtypes)
# same but for empty slice of df
tm.assert_series_equal(df[:0].dtypes, ex_dtypes)
Reported by Pylint.
Line: 47
Column: 5
# same but for empty slice of df
tm.assert_series_equal(df[:0].dtypes, ex_dtypes)
def test_datetime_with_tz_dtypes(self):
tzframe = DataFrame(
{
"A": date_range("20130101", periods=3),
"B": date_range("20130101", periods=3, tz="US/Eastern"),
"C": date_range("20130101", periods=3, tz="CET"),
Reported by Pylint.
Line: 47
Column: 5
# same but for empty slice of df
tm.assert_series_equal(df[:0].dtypes, ex_dtypes)
def test_datetime_with_tz_dtypes(self):
tzframe = DataFrame(
{
"A": date_range("20130101", periods=3),
"B": date_range("20130101", periods=3, tz="US/Eastern"),
"C": date_range("20130101", periods=3, tz="CET"),
Reported by Pylint.
pandas/tests/groupby/test_nunique.py
19 issues
Line: 5
Column: 1
from string import ascii_lowercase
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
MultiIndex,
Reported by Pylint.
Line: 1
Column: 1
import datetime as dt
from string import ascii_lowercase
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Reported by Pylint.
Line: 24
Column: 1
@pytest.mark.parametrize("m", [10, 100, 1000])
@pytest.mark.parametrize("sort", [False, True])
@pytest.mark.parametrize("dropna", [False, True])
def test_series_groupby_nunique(n, m, sort, dropna):
def check_nunique(df, keys, as_index=True):
original_df = df.copy()
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
Reported by Pylint.
Line: 24
Column: 1
@pytest.mark.parametrize("m", [10, 100, 1000])
@pytest.mark.parametrize("sort", [False, True])
@pytest.mark.parametrize("dropna", [False, True])
def test_series_groupby_nunique(n, m, sort, dropna):
def check_nunique(df, keys, as_index=True):
original_df = df.copy()
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
Reported by Pylint.
Line: 24
Column: 1
@pytest.mark.parametrize("m", [10, 100, 1000])
@pytest.mark.parametrize("sort", [False, True])
@pytest.mark.parametrize("dropna", [False, True])
def test_series_groupby_nunique(n, m, sort, dropna):
def check_nunique(df, keys, as_index=True):
original_df = df.copy()
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
Reported by Pylint.
Line: 25
Column: 5
@pytest.mark.parametrize("sort", [False, True])
@pytest.mark.parametrize("dropna", [False, True])
def test_series_groupby_nunique(n, m, sort, dropna):
def check_nunique(df, keys, as_index=True):
original_df = df.copy()
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
gr = df.groupby(keys, as_index=as_index, sort=sort)
Reported by Pylint.
Line: 27
Column: 9
def test_series_groupby_nunique(n, m, sort, dropna):
def check_nunique(df, keys, as_index=True):
original_df = df.copy()
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
gr = df.groupby(keys, as_index=as_index, sort=sort)
right = gr["julie"].apply(Series.nunique, dropna=dropna)
if not as_index:
Reported by Pylint.
Line: 30
Column: 9
gr = df.groupby(keys, as_index=as_index, sort=sort)
left = gr["julie"].nunique(dropna=dropna)
gr = df.groupby(keys, as_index=as_index, sort=sort)
right = gr["julie"].apply(Series.nunique, dropna=dropna)
if not as_index:
right = right.reset_index(drop=True)
if as_index:
Reported by Pylint.
Line: 66
Column: 1
check_nunique(frame, ["jim", "joe"], as_index=False)
def test_nunique():
df = DataFrame({"A": list("abbacc"), "B": list("abxacc"), "C": list("abbacx")})
expected = DataFrame({"A": list("abc"), "B": [1, 2, 1], "C": [1, 1, 2]})
result = df.groupby("A", as_index=False).nunique()
tm.assert_frame_equal(result, expected)
Reported by Pylint.
Line: 67
Column: 5
def test_nunique():
df = DataFrame({"A": list("abbacc"), "B": list("abxacc"), "C": list("abbacx")})
expected = DataFrame({"A": list("abc"), "B": [1, 2, 1], "C": [1, 1, 2]})
result = df.groupby("A", as_index=False).nunique()
tm.assert_frame_equal(result, expected)
Reported by Pylint.
pandas/io/parsers/c_parser_wrapper.py
19 issues
Line: 7
Column: 1
import numpy as np
import pandas._libs.parsers as parsers
from pandas._typing import (
ArrayLike,
FilePathOrBuffer,
)
from pandas.errors import DtypeWarning
Reported by Pylint.
Line: 7
Column: 1
import numpy as np
import pandas._libs.parsers as parsers
from pandas._typing import (
ArrayLike,
FilePathOrBuffer,
)
from pandas.errors import DtypeWarning
Reported by Pylint.
Line: 357
Column: 3
arrs = [chunk.pop(name) for chunk in chunks]
# Check each arr for consistent types.
dtypes = {a.dtype for a in arrs}
# TODO: shouldn't we exclude all EA dtypes here?
numpy_dtypes = {x for x in dtypes if not is_categorical_dtype(x)}
if len(numpy_dtypes) > 1:
# error: Argument 1 to "find_common_type" has incompatible type
# "Set[Any]"; expected "Sequence[Union[dtype[Any], None, type,
# _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any,
Reported by Pylint.
Line: 378
Column: 3
result[name] = union_categoricals(arrs, sort_categories=False)
else:
if isinstance(dtype, ExtensionDtype):
# TODO: concat_compat?
array_type = dtype.construct_array_type()
# error: Argument 1 to "_concat_same_type" of "ExtensionArray"
# has incompatible type "List[Union[ExtensionArray, ndarray]]";
# expected "Sequence[ExtensionArray]"
result[name] = array_type._concat_same_type(
Reported by Pylint.
Line: 383
Column: 32
# error: Argument 1 to "_concat_same_type" of "ExtensionArray"
# has incompatible type "List[Union[ExtensionArray, ndarray]]";
# expected "Sequence[ExtensionArray]"
result[name] = array_type._concat_same_type(
arrs # type: ignore[arg-type]
)
else:
result[name] = np.concatenate(arrs)
Reported by Pylint.
Line: 1
Column: 1
from __future__ import annotations
import warnings
import numpy as np
import pandas._libs.parsers as parsers
from pandas._typing import (
ArrayLike,
Reported by Pylint.
Line: 29
Column: 1
)
class CParserWrapper(ParserBase):
low_memory: bool
_reader: parsers.TextReader
def __init__(self, src: FilePathOrBuffer, **kwds):
self.kwds = kwds
Reported by Pylint.
Line: 29
Column: 1
)
class CParserWrapper(ParserBase):
low_memory: bool
_reader: parsers.TextReader
def __init__(self, src: FilePathOrBuffer, **kwds):
self.kwds = kwds
Reported by Pylint.
Line: 33
Column: 5
low_memory: bool
_reader: parsers.TextReader
def __init__(self, src: FilePathOrBuffer, **kwds):
self.kwds = kwds
kwds = kwds.copy()
ParserBase.__init__(self, kwds)
Reported by Pylint.
Line: 33
Column: 5
low_memory: bool
_reader: parsers.TextReader
def __init__(self, src: FilePathOrBuffer, **kwds):
self.kwds = kwds
kwds = kwds.copy()
ParserBase.__init__(self, kwds)
Reported by Pylint.
pandas/core/index.py
19 issues
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
Line: 3
Column: 1
import warnings
from pandas.core.indexes.api import ( # noqa:F401
CategoricalIndex,
DatetimeIndex,
Float64Index,
Index,
Int64Index,
IntervalIndex,
Reported by Pylint.
pandas/tests/indexes/categorical/test_reindex.py
19 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
Categorical,
CategoricalIndex,
DataFrame,
Index,
Series,
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
Categorical,
CategoricalIndex,
DataFrame,
Index,
Series,
Reported by Pylint.
Line: 14
Column: 1
import pandas._testing as tm
class TestReindex:
def test_reindex_dtype(self):
# GH#11586
ci = CategoricalIndex(["a", "b", "c", "a"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(["a", "c"])
Reported by Pylint.
Line: 15
Column: 5
class TestReindex:
def test_reindex_dtype(self):
# GH#11586
ci = CategoricalIndex(["a", "b", "c", "a"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(["a", "c"])
Reported by Pylint.
Line: 15
Column: 5
class TestReindex:
def test_reindex_dtype(self):
# GH#11586
ci = CategoricalIndex(["a", "b", "c", "a"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(["a", "c"])
Reported by Pylint.
Line: 17
Column: 9
class TestReindex:
def test_reindex_dtype(self):
# GH#11586
ci = CategoricalIndex(["a", "b", "c", "a"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(["a", "c"])
tm.assert_index_equal(res, Index(["a", "a", "c"]), exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
Reported by Pylint.
Line: 24
Column: 9
tm.assert_index_equal(res, Index(["a", "a", "c"]), exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
ci = CategoricalIndex(["a", "b", "c", "a"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(Categorical(["a", "c"]))
exp = CategoricalIndex(["a", "a", "c"], categories=["a", "c"])
tm.assert_index_equal(res, exp, exact=True)
Reported by Pylint.
Line: 32
Column: 9
tm.assert_index_equal(res, exp, exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
ci = CategoricalIndex(["a", "b", "c", "a"], categories=["a", "b", "c", "d"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(["a", "c"])
exp = Index(["a", "a", "c"], dtype="object")
tm.assert_index_equal(res, exp, exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
Reported by Pylint.
Line: 39
Column: 9
tm.assert_index_equal(res, exp, exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
ci = CategoricalIndex(["a", "b", "c", "a"], categories=["a", "b", "c", "d"])
with tm.assert_produces_warning(FutureWarning, match="non-unique"):
res, indexer = ci.reindex(Categorical(["a", "c"]))
exp = CategoricalIndex(["a", "a", "c"], categories=["a", "c"])
tm.assert_index_equal(res, exp, exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
Reported by Pylint.
Line: 46
Column: 5
tm.assert_index_equal(res, exp, exact=True)
tm.assert_numpy_array_equal(indexer, np.array([0, 3, 2], dtype=np.intp))
def test_reindex_duplicate_target(self):
# See GH25459
cat = CategoricalIndex(["a", "b", "c"], categories=["a", "b", "c", "d"])
res, indexer = cat.reindex(["a", "c", "c"])
exp = Index(["a", "c", "c"], dtype="object")
tm.assert_index_equal(res, exp, exact=True)
Reported by Pylint.
pandas/tests/tseries/holiday/test_holiday.py
19 issues
Line: 3
Column: 1
from datetime import datetime
import pytest
from pytz import utc
import pandas._testing as tm
from pandas.tseries.holiday import (
MO,
Reported by Pylint.
Line: 4
Column: 1
from datetime import datetime
import pytest
from pytz import utc
import pandas._testing as tm
from pandas.tseries.holiday import (
MO,
Reported by Pylint.
Line: 199
Column: 53
@pytest.mark.parametrize(
"transform", [lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
)
def test_argument_types(transform):
start_date = datetime(2011, 1, 1)
end_date = datetime(2020, 12, 31)
Reported by Pylint.
Line: 1
Column: 1
from datetime import datetime
import pytest
from pytz import utc
import pandas._testing as tm
from pandas.tseries.holiday import (
MO,
Reported by Pylint.
Line: 44
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
expected : list
The list of dates we expect to get.
"""
assert list(holiday.dates(start, end)) == expected
# Verify that timezone info is preserved.
assert list(
holiday.dates(utc.localize(Timestamp(start)), utc.localize(Timestamp(end)))
) == [utc.localize(dt) for dt in expected]
Reported by Bandit.
Line: 47
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert list(holiday.dates(start, end)) == expected
# Verify that timezone info is preserved.
assert list(
holiday.dates(utc.localize(Timestamp(start)), utc.localize(Timestamp(end)))
) == [utc.localize(dt) for dt in expected]
@pytest.mark.parametrize(
Reported by Bandit.
Line: 139
Column: 1
datetime(2018, 11, 22),
datetime(2019, 11, 28),
datetime(2020, 11, 26),
],
),
],
)
def test_holiday_dates(holiday, start_date, end_date, expected):
_check_holiday_results(holiday, start_date, end_date, expected)
Reported by Pylint.
Line: 180
Column: 1
("Christmas", "2011-12-26", "2011-12-26"),
("Christmas", datetime(2015, 7, 1), []),
("Christmas", "2011-12-25", []),
],
)
def test_holidays_within_dates(holiday, start, expected):
# see gh-11477
#
# Fix holiday behavior where holiday.dates returned dates outside
Reported by Pylint.
Line: 200
Column: 1
@pytest.mark.parametrize(
"transform", [lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
)
def test_argument_types(transform):
start_date = datetime(2011, 1, 1)
end_date = datetime(2020, 12, 31)
holidays = USThanksgivingDay.dates(start_date, end_date)
Reported by Pylint.
Line: 222
Column: 1
"start_date": datetime(2012, 1, 1),
"end_date": datetime(2012, 12, 31),
"offset": DateOffset(weekday=MO(1)),
},
),
],
)
def test_special_holidays(name, kwargs):
base_date = [datetime(2012, 5, 28)]
Reported by Pylint.
pandas/tests/indexes/period/test_monotonic.py
19 issues
Line: 1
Column: 1
from pandas import (
Period,
PeriodIndex,
)
def test_is_monotonic_increasing():
# GH#17717
p0 = Period("2017-09-01")
Reported by Pylint.
Line: 7
Column: 1
)
def test_is_monotonic_increasing():
# GH#17717
p0 = Period("2017-09-01")
p1 = Period("2017-09-02")
p2 = Period("2017-09-03")
Reported by Pylint.
Line: 9
Column: 5
def test_is_monotonic_increasing():
# GH#17717
p0 = Period("2017-09-01")
p1 = Period("2017-09-02")
p2 = Period("2017-09-03")
idx_inc0 = PeriodIndex([p0, p1, p2])
idx_inc1 = PeriodIndex([p0, p1, p1])
Reported by Pylint.
Line: 10
Column: 5
def test_is_monotonic_increasing():
# GH#17717
p0 = Period("2017-09-01")
p1 = Period("2017-09-02")
p2 = Period("2017-09-03")
idx_inc0 = PeriodIndex([p0, p1, p2])
idx_inc1 = PeriodIndex([p0, p1, p1])
idx_dec0 = PeriodIndex([p2, p1, p0])
Reported by Pylint.
Line: 11
Column: 5
# GH#17717
p0 = Period("2017-09-01")
p1 = Period("2017-09-02")
p2 = Period("2017-09-03")
idx_inc0 = PeriodIndex([p0, p1, p2])
idx_inc1 = PeriodIndex([p0, p1, p1])
idx_dec0 = PeriodIndex([p2, p1, p0])
idx_dec1 = PeriodIndex([p2, p1, p1])
Reported by Pylint.
Line: 19
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
idx_dec1 = PeriodIndex([p2, p1, p1])
idx = PeriodIndex([p1, p2, p0])
assert idx_inc0.is_monotonic_increasing is True
assert idx_inc1.is_monotonic_increasing is True
assert idx_dec0.is_monotonic_increasing is False
assert idx_dec1.is_monotonic_increasing is False
assert idx.is_monotonic_increasing is False
Reported by Bandit.
Line: 20
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
idx = PeriodIndex([p1, p2, p0])
assert idx_inc0.is_monotonic_increasing is True
assert idx_inc1.is_monotonic_increasing is True
assert idx_dec0.is_monotonic_increasing is False
assert idx_dec1.is_monotonic_increasing is False
assert idx.is_monotonic_increasing is False
Reported by Bandit.
Line: 21
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert idx_inc0.is_monotonic_increasing is True
assert idx_inc1.is_monotonic_increasing is True
assert idx_dec0.is_monotonic_increasing is False
assert idx_dec1.is_monotonic_increasing is False
assert idx.is_monotonic_increasing is False
def test_is_monotonic_decreasing():
Reported by Bandit.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert idx_inc0.is_monotonic_increasing is True
assert idx_inc1.is_monotonic_increasing is True
assert idx_dec0.is_monotonic_increasing is False
assert idx_dec1.is_monotonic_increasing is False
assert idx.is_monotonic_increasing is False
def test_is_monotonic_decreasing():
# GH#17717
Reported by Bandit.
Line: 23
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert idx_inc1.is_monotonic_increasing is True
assert idx_dec0.is_monotonic_increasing is False
assert idx_dec1.is_monotonic_increasing is False
assert idx.is_monotonic_increasing is False
def test_is_monotonic_decreasing():
# GH#17717
p0 = Period("2017-09-01")
Reported by Bandit.
pandas/tests/io/json/test_compression.py
19 issues
Line: 3
Column: 1
from io import BytesIO
import pytest
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
from io import BytesIO
import pytest
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
Reported by Pylint.
Line: 11
Column: 1
import pandas._testing as tm
def test_compression_roundtrip(compression):
df = pd.DataFrame(
[[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
index=["A", "B"],
columns=["X", "Y", "Z"],
)
Reported by Pylint.
Line: 12
Column: 5
def test_compression_roundtrip(compression):
df = pd.DataFrame(
[[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
index=["A", "B"],
columns=["X", "Y", "Z"],
)
Reported by Pylint.
Line: 23
Column: 55
tm.assert_frame_equal(df, pd.read_json(path, compression=compression))
# explicitly ensure file was compressed.
with tm.decompress_file(path, compression) as fh:
result = fh.read().decode("utf8")
tm.assert_frame_equal(df, pd.read_json(result))
def test_read_zipped_json(datapath):
Reported by Pylint.
Line: 28
Column: 1
tm.assert_frame_equal(df, pd.read_json(result))
def test_read_zipped_json(datapath):
uncompressed_path = datapath("io", "json", "data", "tsframe_v012.json")
uncompressed_df = pd.read_json(uncompressed_path)
compressed_path = datapath("io", "json", "data", "tsframe_v012.json.zip")
compressed_df = pd.read_json(compressed_path, compression="zip")
Reported by Pylint.
Line: 39
Column: 1
@td.skip_if_not_us_locale
def test_with_s3_url(compression, s3_resource, s3so):
# Bucket "pandas-test" created in tests/io/conftest.py
df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
with tm.ensure_clean() as path:
Reported by Pylint.
Line: 42
Column: 5
def test_with_s3_url(compression, s3_resource, s3so):
# Bucket "pandas-test" created in tests/io/conftest.py
df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
with tm.ensure_clean() as path:
df.to_json(path, compression=compression)
with open(path, "rb") as f:
s3_resource.Bucket("pandas-test").put_object(Key="test-1", Body=f)
Reported by Pylint.
Line: 46
Column: 34
with tm.ensure_clean() as path:
df.to_json(path, compression=compression)
with open(path, "rb") as f:
s3_resource.Bucket("pandas-test").put_object(Key="test-1", Body=f)
roundtripped_df = pd.read_json(
"s3://pandas-test/test-1", compression=compression, storage_options=s3so
)
Reported by Pylint.
Line: 55
Column: 1
tm.assert_frame_equal(df, roundtripped_df)
def test_lines_with_compression(compression):
with tm.ensure_clean() as path:
df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
df.to_json(path, orient="records", lines=True, compression=compression)
roundtripped_df = pd.read_json(path, lines=True, compression=compression)
Reported by Pylint.
pandas/tests/series/indexing/test_mask.py
19 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
NA,
Series,
StringDtype,
)
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
NA,
Series,
StringDtype,
)
import pandas._testing as tm
Reported by Pylint.
Line: 12
Column: 1
import pandas._testing as tm
def test_mask():
# compare with tested results in test_where
s = Series(np.random.randn(5))
cond = s > 0
rs = s.where(~cond, np.nan)
Reported by Pylint.
Line: 14
Column: 5
def test_mask():
# compare with tested results in test_where
s = Series(np.random.randn(5))
cond = s > 0
rs = s.where(~cond, np.nan)
tm.assert_series_equal(rs, s.mask(cond))
Reported by Pylint.
Line: 17
Column: 5
s = Series(np.random.randn(5))
cond = s > 0
rs = s.where(~cond, np.nan)
tm.assert_series_equal(rs, s.mask(cond))
rs = s.where(~cond)
rs2 = s.mask(cond)
tm.assert_series_equal(rs, rs2)
Reported by Pylint.
Line: 20
Column: 5
rs = s.where(~cond, np.nan)
tm.assert_series_equal(rs, s.mask(cond))
rs = s.where(~cond)
rs2 = s.mask(cond)
tm.assert_series_equal(rs, rs2)
rs = s.where(~cond, -s)
rs2 = s.mask(cond, -s)
Reported by Pylint.
Line: 24
Column: 5
rs2 = s.mask(cond)
tm.assert_series_equal(rs, rs2)
rs = s.where(~cond, -s)
rs2 = s.mask(cond, -s)
tm.assert_series_equal(rs, rs2)
cond = Series([True, False, False, True, False], index=s.index)
s2 = -(s.abs())
Reported by Pylint.
Line: 29
Column: 5
tm.assert_series_equal(rs, rs2)
cond = Series([True, False, False, True, False], index=s.index)
s2 = -(s.abs())
rs = s2.where(~cond[:3])
rs2 = s2.mask(cond[:3])
tm.assert_series_equal(rs, rs2)
rs = s2.where(~cond[:3], -s2)
Reported by Pylint.
Line: 30
Column: 5
cond = Series([True, False, False, True, False], index=s.index)
s2 = -(s.abs())
rs = s2.where(~cond[:3])
rs2 = s2.mask(cond[:3])
tm.assert_series_equal(rs, rs2)
rs = s2.where(~cond[:3], -s2)
rs2 = s2.mask(cond[:3], -s2)
Reported by Pylint.
Line: 34
Column: 5
rs2 = s2.mask(cond[:3])
tm.assert_series_equal(rs, rs2)
rs = s2.where(~cond[:3], -s2)
rs2 = s2.mask(cond[:3], -s2)
tm.assert_series_equal(rs, rs2)
msg = "Array conditional must be same shape as self"
with pytest.raises(ValueError, match=msg):
Reported by Pylint.
pandas/tests/series/methods/test_count.py
19 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import (
Categorical,
MultiIndex,
Series,
)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import (
Categorical,
MultiIndex,
Series,
)
Reported by Pylint.
Line: 13
Column: 1
import pandas._testing as tm
class TestSeriesCount:
def test_count_level_series(self):
index = MultiIndex(
levels=[["foo", "bar", "baz"], ["one", "two", "three", "four"]],
codes=[[0, 0, 0, 2, 2], [2, 0, 1, 1, 2]],
)
Reported by Pylint.
Line: 14
Column: 5
class TestSeriesCount:
def test_count_level_series(self):
index = MultiIndex(
levels=[["foo", "bar", "baz"], ["one", "two", "three", "four"]],
codes=[[0, 0, 0, 2, 2], [2, 0, 1, 1, 2]],
)
Reported by Pylint.
Line: 14
Column: 5
class TestSeriesCount:
def test_count_level_series(self):
index = MultiIndex(
levels=[["foo", "bar", "baz"], ["one", "two", "three", "four"]],
codes=[[0, 0, 0, 2, 2], [2, 0, 1, 1, 2]],
)
Reported by Pylint.
Line: 36
Column: 5
result.astype("f8"), expected.reindex(result.index).fillna(0)
)
def test_count_multiindex(self, series_with_multilevel_index):
ser = series_with_multilevel_index
series = ser.copy()
series.index.names = ["a", "b"]
Reported by Pylint.
Line: 36
Column: 5
result.astype("f8"), expected.reindex(result.index).fillna(0)
)
def test_count_multiindex(self, series_with_multilevel_index):
ser = series_with_multilevel_index
series = ser.copy()
series.index.names = ["a", "b"]
Reported by Pylint.
Line: 59
Column: 5
with tm.assert_produces_warning(FutureWarning):
series.count("x")
def test_count_level_without_multiindex(self):
ser = Series(range(3))
msg = "Series.count level is only valid with a MultiIndex"
with pytest.raises(ValueError, match=msg):
with tm.assert_produces_warning(FutureWarning):
Reported by Pylint.
Line: 59
Column: 5
with tm.assert_produces_warning(FutureWarning):
series.count("x")
def test_count_level_without_multiindex(self):
ser = Series(range(3))
msg = "Series.count level is only valid with a MultiIndex"
with pytest.raises(ValueError, match=msg):
with tm.assert_produces_warning(FutureWarning):
Reported by Pylint.
Line: 67
Column: 5
with tm.assert_produces_warning(FutureWarning):
ser.count(level=1)
def test_count(self, datetime_series):
assert datetime_series.count() == len(datetime_series)
datetime_series[::2] = np.NaN
assert datetime_series.count() == np.isfinite(datetime_series).sum()
Reported by Pylint.