The following issues were found:
pandas/tests/tseries/offsets/test_year.py
23 issues
Line: 8
Column: 1
"""
from datetime import datetime
import pytest
from pandas.tests.tseries.offsets.common import (
    Base,
    assert_is_on_offset,
    assert_offset_equal,
Reported by Pylint.
Line: 22
Column: 1
)
class TestYearBegin(Base):
    _offset = YearBegin
    def test_misspecified(self):
        with pytest.raises(ValueError, match="Month must go from 1 to 12"):
            YearBegin(month=13)
Reported by Pylint.
Line: 25
Column: 5
class TestYearBegin(Base):
    _offset = YearBegin
    def test_misspecified(self):
        with pytest.raises(ValueError, match="Month must go from 1 to 12"):
            YearBegin(month=13)
    offset_cases = []
    offset_cases.append(
Reported by Pylint.
Line: 25
Column: 5
class TestYearBegin(Base):
    _offset = YearBegin
    def test_misspecified(self):
        with pytest.raises(ValueError, match="Month must go from 1 to 12"):
            YearBegin(month=13)
    offset_cases = []
    offset_cases.append(
Reported by Pylint.
Line: 158
Column: 5
    )
    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)
    on_offset_cases = [
Reported by Pylint.
Line: 158
Column: 5
    )
    @pytest.mark.parametrize("case", offset_cases)
    def test_offset(self, case):
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)
    on_offset_cases = [
Reported by Pylint.
Line: 171
Column: 5
    ]
    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
class TestYearEnd(Base):
Reported by Pylint.
Line: 171
Column: 5
    ]
    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, dt, expected = case
        assert_is_on_offset(offset, dt, expected)
class TestYearEnd(Base):
Reported by Pylint.
Line: 172
Column: 17
@pytest.mark.parametrize("case", on_offset_cases)
def test_is_on_offset(self, case):
offset, dt, expected = case
assert_is_on_offset(offset, dt, expected)
class TestYearEnd(Base):
_offset = YearEnd
Reported by Pylint.
Line: 176
Column: 1
        assert_is_on_offset(offset, dt, expected)
class TestYearEnd(Base):
    _offset = YearEnd
    def test_misspecified(self):
        with pytest.raises(ValueError, match="Month must go from 1 to 12"):
            YearEnd(month=13)
Reported by Pylint.
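For context, the offset_cases/on_offset_cases machinery shown above exercises pandas' anchored year offsets. A minimal standalone sketch of the behavior under test (illustration only, not taken from the file under review):
from datetime import datetime
from pandas.tseries.offsets import YearBegin, YearEnd
# Adding YearBegin rolls forward to the next year start.
assert datetime(2008, 2, 15) + YearBegin() == datetime(2009, 1, 1)
# YearEnd anchored to another month rolls forward to that month's end.
assert datetime(2008, 2, 15) + YearEnd(month=6) == datetime(2008, 6, 30)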
pandas/tests/io/parser/dtypes/test_dtypes_basic.py
22 issues
Line: 8
Column: 1
from io import StringIO
import numpy as np
import pytest
from pandas.errors import ParserWarning
import pandas as pd
from pandas import (
Reported by Pylint.
Line: 103
Column: 65
    # Dtype spec ignored if converted specified.
    with tm.assert_produces_warning(ParserWarning):
        result = parser.read_csv(
            StringIO(data), dtype={"a": "i8"}, converters={"a": lambda x: str(x)}
        )
    expected = DataFrame({"a": ["1.1", "1.2"], "b": [2.2, 2.3]})
    tm.assert_frame_equal(result, expected)
Reported by Pylint.
Line: 205
Column: 62
    decimal_number_check(c_parser_only, numeric_decimal, thousands, float_precision)
def decimal_number_check(parser, numeric_decimal, thousands, float_precision):
    # GH#31920
    value = numeric_decimal[0]
    if thousands is None and "_" in value:
        pytest.skip("Skip test if no thousands sep is defined and sep is in value")
    df = parser.read_csv(
Reported by Pylint.
Line: 22
Column: 1
@pytest.mark.parametrize("dtype", [str, object])
@pytest.mark.parametrize("check_orig", [True, False])
def test_dtype_all_columns(all_parsers, dtype, check_orig):
    # see gh-3795, gh-6607
    parser = all_parsers
    df = DataFrame(
        np.random.rand(5, 2).round(4),
Reported by Pylint.
Line: 26
Column: 5
    # see gh-3795, gh-6607
    parser = all_parsers
    df = DataFrame(
        np.random.rand(5, 2).round(4),
        columns=list("AB"),
        index=["1A", "1B", "1C", "1D", "1E"],
    )
Reported by Pylint.
Line: 46
Column: 1
    tm.assert_frame_equal(result, expected)
def test_dtype_per_column(all_parsers):
    parser = all_parsers
    data = """\
one,two
1,2.5
2,3.5
Reported by Pylint.
Line: 64
Column: 1
    tm.assert_frame_equal(result, expected)
def test_invalid_dtype_per_column(all_parsers):
    parser = all_parsers
    data = """\
one,two
1,2.5
2,3.5
Reported by Pylint.
Line: 77
Column: 1
parser.read_csv(StringIO(data), dtype={"one": "foo", 1: "int"})
def test_raise_on_passed_int_dtype_with_nas(all_parsers):
# see gh-2631
parser = all_parsers
data = """YEAR, DOY, a
2001,106380451,10
2001,,11
Reported by Pylint.
Line: 94
Column: 1
parser.read_csv(StringIO(data), dtype={"DOY": np.int64}, skipinitialspace=True)
def test_dtype_with_converters(all_parsers):
parser = all_parsers
data = """a,b
1.1,2.2
1.2,2.3"""
Reported by Pylint.
Line: 111
Column: 1
@pytest.mark.parametrize(
"dtype", list(np.typecodes["AllInteger"] + np.typecodes["Float"])
)
def test_numeric_dtype(all_parsers, dtype):
data = "0\n1"
parser = all_parsers
expected = DataFrame([0, 1], dtype=dtype)
Reported by Pylint.
pandas/tests/tseries/holiday/test_observance.py
22 issues
Line: 3
Column: 1
from datetime import datetime
import pytest
from pandas.tseries.holiday import (
    after_nearest_workday,
    before_nearest_workday,
    nearest_workday,
    next_monday,
Reported by Pylint.
Line: 1
Column: 1
from datetime import datetime
import pytest
from pandas.tseries.holiday import (
    after_nearest_workday,
    before_nearest_workday,
    nearest_workday,
    next_monday,
Reported by Pylint.
Line: 29
Column: 1
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
def test_next_monday(day):
    assert next_monday(day) == _MONDAY
@pytest.mark.parametrize(
    "day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_MONDAY, _TUESDAY)]
Reported by Pylint.
Line: 30
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
def test_next_monday(day):
    assert next_monday(day) == _MONDAY
@pytest.mark.parametrize(
    "day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_MONDAY, _TUESDAY)]
)
Reported by Bandit.
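The Bandit entries linking to b101_assert_used flag the bare assert statement, which pytest-style tests rely on for their assertions. If such findings are reviewed and accepted, Bandit's inline "# nosec" suppression marker (optionally qualified with the check ID) is one common remediation; a minimal standalone sketch, not taken from the file under review:
def test_example():
    result = 1 + 1
    assert result == 2  # nosec B101 - intentional pytest assertion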
Line: 35
Column: 1
@pytest.mark.parametrize(
"day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_MONDAY, _TUESDAY)]
)
def test_next_monday_or_tuesday(day, expected):
assert next_monday_or_tuesday(day) == expected
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
Reported by Pylint.
Line: 37
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_MONDAY, _TUESDAY)]
)
def test_next_monday_or_tuesday(day, expected):
assert next_monday_or_tuesday(day) == expected
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
def test_previous_friday(day):
assert previous_friday(day) == _FRIDAY
Reported by Bandit.
Line: 41
Column: 1
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
def test_previous_friday(day):
    assert previous_friday(day) == _FRIDAY
def test_sunday_to_monday():
    assert sunday_to_monday(_SUNDAY) == _MONDAY
Reported by Pylint.
Line: 42
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
def test_previous_friday(day):
    assert previous_friday(day) == _FRIDAY
def test_sunday_to_monday():
    assert sunday_to_monday(_SUNDAY) == _MONDAY
Reported by Bandit.
Line: 45
Column: 1
    assert previous_friday(day) == _FRIDAY
def test_sunday_to_monday():
    assert sunday_to_monday(_SUNDAY) == _MONDAY
@pytest.mark.parametrize(
    "day,expected", [(_SATURDAY, _FRIDAY), (_SUNDAY, _MONDAY), (_MONDAY, _MONDAY)]
Reported by Pylint.
Line: 46
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_sunday_to_monday():
    assert sunday_to_monday(_SUNDAY) == _MONDAY
@pytest.mark.parametrize(
    "day,expected", [(_SATURDAY, _FRIDAY), (_SUNDAY, _MONDAY), (_MONDAY, _MONDAY)]
)
Reported by Bandit.
pandas/tests/indexes/period/methods/test_shift.py
22 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
    PeriodIndex,
    period_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
    PeriodIndex,
    period_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 11
Column: 1
import pandas._testing as tm
class TestPeriodIndexShift:
    # ---------------------------------------------------------------
    # PeriodIndex.shift is used by __add__ and __sub__
    def test_pi_shift_ndarray(self):
        idx = PeriodIndex(
Reported by Pylint.
Line: 15
Column: 5
    # ---------------------------------------------------------------
    # PeriodIndex.shift is used by __add__ and __sub__
    def test_pi_shift_ndarray(self):
        idx = PeriodIndex(
            ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
        )
        result = idx.shift(np.array([1, 2, 3, 4]))
        expected = PeriodIndex(
Reported by Pylint.
Line: 15
Column: 5
    # ---------------------------------------------------------------
    # PeriodIndex.shift is used by __add__ and __sub__
    def test_pi_shift_ndarray(self):
        idx = PeriodIndex(
            ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
        )
        result = idx.shift(np.array([1, 2, 3, 4]))
        expected = PeriodIndex(
Reported by Pylint.
Line: 31
Column: 5
        )
        tm.assert_index_equal(result, expected)
    def test_shift(self):
        pi1 = period_range(freq="A", start="1/1/2001", end="12/1/2009")
        pi2 = period_range(freq="A", start="1/1/2002", end="12/1/2010")
        tm.assert_index_equal(pi1.shift(0), pi1)
Reported by Pylint.
Line: 31
Column: 5
        )
        tm.assert_index_equal(result, expected)
    def test_shift(self):
        pi1 = period_range(freq="A", start="1/1/2001", end="12/1/2009")
        pi2 = period_range(freq="A", start="1/1/2002", end="12/1/2010")
        tm.assert_index_equal(pi1.shift(0), pi1)
Reported by Pylint.
Line: 37
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
        tm.assert_index_equal(pi1.shift(0), pi1)
        assert len(pi1) == len(pi2)
        tm.assert_index_equal(pi1.shift(1), pi2)
        pi1 = period_range(freq="A", start="1/1/2001", end="12/1/2009")
        pi2 = period_range(freq="A", start="1/1/2000", end="12/1/2008")
        assert len(pi1) == len(pi2)
Reported by Bandit.
Line: 42
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
pi1 = period_range(freq="A", start="1/1/2001", end="12/1/2009")
pi2 = period_range(freq="A", start="1/1/2000", end="12/1/2008")
assert len(pi1) == len(pi2)
tm.assert_index_equal(pi1.shift(-1), pi2)
pi1 = period_range(freq="M", start="1/1/2001", end="12/1/2009")
pi2 = period_range(freq="M", start="2/1/2001", end="1/1/2010")
assert len(pi1) == len(pi2)
Reported by Bandit.
Line: 47
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
pi1 = period_range(freq="M", start="1/1/2001", end="12/1/2009")
pi2 = period_range(freq="M", start="2/1/2001", end="1/1/2010")
assert len(pi1) == len(pi2)
tm.assert_index_equal(pi1.shift(1), pi2)
pi1 = period_range(freq="M", start="1/1/2001", end="12/1/2009")
pi2 = period_range(freq="M", start="12/1/2000", end="11/1/2009")
assert len(pi1) == len(pi2)
Reported by Bandit.
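As the comment in the snippets above notes, PeriodIndex.shift backs __add__ and __sub__, so shifting by an integer and adding that integer agree. A minimal standalone sketch (illustration only, not taken from the file under review):
import pandas as pd
pi = pd.period_range(start="2001", end="2009", freq="A")
assert pi.shift(1).equals(pi + 1)  # shift(1) and "+ 1" produce the same index
assert pi.shift(1)[0] == pd.Period("2002", freq="A")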
pandas/tests/indexing/test_indexers.py
22 issues
Line: 3
Column: 1
# Tests aimed at pandas.core.indexers
import numpy as np
import pytest
from pandas.core.indexers import (
    is_scalar_indexer,
    length_of_indexer,
    validate_indices,
)
Reported by Pylint.
Line: 1
Column: 1
# Tests aimed at pandas.core.indexers
import numpy as np
import pytest
from pandas.core.indexers import (
    is_scalar_indexer,
    length_of_indexer,
    validate_indices,
)
Reported by Pylint.
Line: 12
Column: 1
)
def test_length_of_indexer():
    arr = np.zeros(4, dtype=bool)
    arr[0] = 1
    result = length_of_indexer(arr)
    assert result == 1
Reported by Pylint.
Line: 16
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    arr = np.zeros(4, dtype=bool)
    arr[0] = 1
    result = length_of_indexer(arr)
    assert result == 1
def test_is_scalar_indexer():
    indexer = (0, 1)
    assert is_scalar_indexer(indexer, 2)
Reported by Bandit.
Line: 19
Column: 1
    assert result == 1
def test_is_scalar_indexer():
    indexer = (0, 1)
    assert is_scalar_indexer(indexer, 2)
    assert not is_scalar_indexer(indexer[0], 2)
    indexer = (np.array([2]), 1)
Reported by Pylint.
Line: 21
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_is_scalar_indexer():
    indexer = (0, 1)
    assert is_scalar_indexer(indexer, 2)
    assert not is_scalar_indexer(indexer[0], 2)
    indexer = (np.array([2]), 1)
    assert is_scalar_indexer(indexer, 2)
Reported by Bandit.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_is_scalar_indexer():
    indexer = (0, 1)
    assert is_scalar_indexer(indexer, 2)
    assert not is_scalar_indexer(indexer[0], 2)
    indexer = (np.array([2]), 1)
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3]))
Reported by Bandit.
Line: 25
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    assert not is_scalar_indexer(indexer[0], 2)
    indexer = (np.array([2]), 1)
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3]))
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3, 4]))
Reported by Bandit.
Line: 28
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3]))
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3, 4]))
    assert not is_scalar_indexer(indexer, 2)
    assert not is_scalar_indexer(slice(None), 1)
Reported by Bandit.
Line: 31
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    assert is_scalar_indexer(indexer, 2)
    indexer = (np.array([2]), np.array([3, 4]))
    assert not is_scalar_indexer(indexer, 2)
    assert not is_scalar_indexer(slice(None), 1)
    indexer = 0
    assert is_scalar_indexer(indexer, 1)
Reported by Bandit.
pandas/tests/io/parser/common/test_read_errors.py
22 issues
Line: 13
Column: 1
import warnings
import numpy as np
import pytest
from pandas.errors import (
    EmptyDataError,
    ParserError,
)
Reported by Pylint.
Line: 25
Column: 1
import pandas._testing as tm
def test_empty_decimal_marker(all_parsers):
data = """A|B|C
1|2,334|5
10|13|10.
"""
# Parsers support only length-1 decimals
Reported by Pylint.
Line: 38
Column: 1
parser.read_csv(StringIO(data), decimal="")
def test_bad_stream_exception(all_parsers, csv_dir_path):
# see gh-13652
#
# This test validates that both the Python engine and C engine will
# raise UnicodeDecodeError instead of C engine raising ParserError
# and swallowing the exception that caused read to fail.
Reported by Pylint.
Line: 59
Column: 1
            parser.read_csv(stream)
def test_malformed(all_parsers):
    # see gh-6607
    parser = all_parsers
    data = """ignore
A,B,C
1,2,3 # comment
Reported by Pylint.
Line: 74
Column: 1
@pytest.mark.parametrize("nrows", [5, 3, None])
def test_malformed_chunks(all_parsers, nrows):
data = """ignore
A,B,C
skip
1,2,3
3,5,10 # comment
Reported by Pylint.
Line: 92
Column: 1
            reader.read(nrows)
def test_catch_too_many_names(all_parsers):
    # see gh-5156
    data = """\
1,2,3
4,,6
7,8,9
Reported by Pylint.
Line: 112
Column: 1
@pytest.mark.parametrize("nrows", [0, 1, 2, 3, 4, 5])
def test_raise_on_no_columns(all_parsers, nrows):
    parser = all_parsers
    data = "\n" * nrows
    msg = "No columns to parse from file"
    with pytest.raises(EmptyDataError, match=msg):
Reported by Pylint.
Line: 121
Column: 1
        parser.read_csv(StringIO(data))
def test_read_csv_raises_on_header_prefix(all_parsers):
    # gh-27394
    parser = all_parsers
    msg = "Argument prefix must be None if argument header is not None"
    s = StringIO("0,1\n2,3")
Reported by Pylint.
Line: 126
Column: 5
    parser = all_parsers
    msg = "Argument prefix must be None if argument header is not None"
    s = StringIO("0,1\n2,3")
    with pytest.raises(ValueError, match=msg):
        parser.read_csv(s, header=0, prefix="_X")
Reported by Pylint.
Line: 132
Column: 1
parser.read_csv(s, header=0, prefix="_X")
def test_unexpected_keyword_parameter_exception(all_parsers):
# GH-34976
parser = all_parsers
msg = "{}\\(\\) got an unexpected keyword argument 'foo'"
with pytest.raises(TypeError, match=msg.format("read_csv")):
Reported by Pylint.
pandas/io/formats/info.py
22 issues
Line: 165
Column: 24
                # categories)
                if (
                    "object" in self.dtype_counts
                    or self.data.index._is_memory_usage_qualified()
                ):
                    size_qualifier = "+"
        return size_qualifier
    @abstractmethod
Reported by Pylint.
Line: 449
Column: 28
    def add_index_range_line(self) -> None:
        """Add line with range of indices to the table."""
        self._lines.append(self.data.index._summary())
    def add_dtypes_line(self) -> None:
        """Add summary line with dtypes present in dataframe."""
        collected_dtypes = [
            f"{key}({val:d})" for key, val in sorted(self.dtype_counts.items())
Reported by Pylint.
Line: 525
Column: 28
            self.add_memory_usage_line()
    def add_columns_summary_line(self) -> None:
        self._lines.append(self.ids._summary(name="Columns"))
class TableBuilderVerboseMixin(TableBuilderAbstract):
    """
    Mixin for verbose info output.
Reported by Pylint.
Line: 626
Column: 5
    Dataframe info table builder for verbose output.
    """
    def __init__(
        self,
        *,
        info: DataFrameInfo,
        with_counts: bool,
    ):
Reported by Pylint.
Line: 1
Column: 1
from __future__ import annotations
from abc import (
    ABC,
    abstractmethod,
)
import sys
from typing import (
    IO,
Reported by Pylint.
Line: 33
Column: 1
)
def _put_str(s: str | Dtype, space: int) -> str:
"""
Make string of specified length, padding to the right if necessary.
Parameters
----------
Reported by Pylint.
Line: 83
Column: 9
>>> _sizeof_fmt(23028, '+')
'22.5+ KB'
"""
for x in ["bytes", "KB", "MB", "GB", "TB"]:
if num < 1024.0:
return f"{num:3.1f}{size_qualifier} {x}"
num /= 1024.0
return f"{num:3.1f}{size_qualifier} PB"
Reported by Pylint.
Line: 156
Column: 5
return f"{_sizeof_fmt(self.memory_usage_bytes, self.size_qualifier)}\n"
@property
def size_qualifier(self) -> str:
size_qualifier = ""
if self.memory_usage:
if self.memory_usage != "deep":
# size_qualifier is just a best effort; not guaranteed to catch
# all cases (e.g., it misses categorical data even with object
Reported by Pylint.
Line: 281
Column: 9
    @property
    def memory_usage_bytes(self) -> int:
        if self.memory_usage == "deep":
            deep = True
        else:
            deep = False
        return self.data.memory_usage(index=True, deep=deep).sum()
Reported by Pylint.
Line: 304
Column: 1
        printer.to_buffer(buf)
class InfoPrinterAbstract:
    """
    Class for printing dataframe or series info.
    """
    def to_buffer(self, buf: IO[str] | None = None) -> None:
Reported by Pylint.
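The size_qualifier and memory_usage_bytes logic shown above drives the memory line of the public DataFrame.info output. A minimal standalone sketch (illustration only, not taken from the module under review):
import pandas as pd
df = pd.DataFrame({"a": ["x", "y", "z"], "b": [1, 2, 3]})
df.info()                     # object dtype present, so the size is reported with a trailing "+"
df.info(memory_usage="deep")  # introspects object values, so the exact size is shown without "+"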
pandas/tests/frame/methods/test_pct_change.py
22 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
    DataFrame,
    Series,
)
import pandas._testing as tm
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
    DataFrame,
    Series,
)
import pandas._testing as tm
Reported by Pylint.
Line: 11
Column: 1
import pandas._testing as tm
class TestDataFramePctChange:
    @pytest.mark.parametrize(
        "periods,fill_method,limit,exp",
        [
            (1, "ffill", None, [np.nan, np.nan, np.nan, 1, 1, 1.5, 0, 0]),
            (1, "ffill", 1, [np.nan, np.nan, np.nan, 1, 1, 1.5, 0, np.nan]),
Reported by Pylint.
Line: 24
Column: 5
(-1, "bfill", None, [0, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
(-1, "bfill", 1, [np.nan, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
],
)
@pytest.mark.parametrize("klass", [DataFrame, Series])
def test_pct_change_with_nas(self, periods, fill_method, limit, exp, klass):
vals = [np.nan, np.nan, 1, 2, 4, 10, np.nan, np.nan]
obj = klass(vals)
Reported by Pylint.
Line: 24
Column: 5
(-1, "bfill", None, [0, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
(-1, "bfill", 1, [np.nan, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
],
)
@pytest.mark.parametrize("klass", [DataFrame, Series])
def test_pct_change_with_nas(self, periods, fill_method, limit, exp, klass):
vals = [np.nan, np.nan, 1, 2, 4, 10, np.nan, np.nan]
obj = klass(vals)
Reported by Pylint.
Line: 24
Column: 5
(-1, "bfill", None, [0, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
(-1, "bfill", 1, [np.nan, 0, -0.5, -0.5, -0.6, np.nan, np.nan, np.nan]),
],
)
@pytest.mark.parametrize("klass", [DataFrame, Series])
def test_pct_change_with_nas(self, periods, fill_method, limit, exp, klass):
vals = [np.nan, np.nan, 1, 2, 4, 10, np.nan, np.nan]
obj = klass(vals)
Reported by Pylint.
Line: 33
Column: 5
        res = obj.pct_change(periods=periods, fill_method=fill_method, limit=limit)
        tm.assert_equal(res, klass(exp))
    def test_pct_change_numeric(self):
        # GH#11150
        pnl = DataFrame(
            [np.arange(0, 40, 10), np.arange(0, 40, 10), np.arange(0, 40, 10)]
        ).astype(np.float64)
        pnl.iat[1, 0] = np.nan
Reported by Pylint.
Line: 33
Column: 5
        res = obj.pct_change(periods=periods, fill_method=fill_method, limit=limit)
        tm.assert_equal(res, klass(exp))
    def test_pct_change_numeric(self):
        # GH#11150
        pnl = DataFrame(
            [np.arange(0, 40, 10), np.arange(0, 40, 10), np.arange(0, 40, 10)]
        ).astype(np.float64)
        pnl.iat[1, 0] = np.nan
Reported by Pylint.
Line: 48
Column: 5
        tm.assert_frame_equal(result, expected)
    def test_pct_change(self, datetime_frame):
        rs = datetime_frame.pct_change(fill_method=None)
        tm.assert_frame_equal(rs, datetime_frame / datetime_frame.shift(1) - 1)
        rs = datetime_frame.pct_change(2)
        filled = datetime_frame.fillna(method="pad")
Reported by Pylint.
Line: 48
Column: 5
        tm.assert_frame_equal(result, expected)
    def test_pct_change(self, datetime_frame):
        rs = datetime_frame.pct_change(fill_method=None)
        tm.assert_frame_equal(rs, datetime_frame / datetime_frame.shift(1) - 1)
        rs = datetime_frame.pct_change(2)
        filled = datetime_frame.fillna(method="pad")
Reported by Pylint.
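The periods/fill_method/limit combinations above map directly onto the public pct_change API. A minimal standalone sketch (illustration only, not taken from the file under review):
import numpy as np
import pandas as pd
ser = pd.Series([np.nan, 1.0, 2.0, np.nan, 4.0])
ser.pct_change()                             # NaNs are forward-filled before the change is computed
ser.pct_change(periods=2, fill_method=None)  # no filling; compare each value with the one two positions back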
pandas/core/arrays/floating.py
22 issues
Line: 7
Column: 1
import numpy as np
from pandas._libs import (
    lib,
    missing as libmissing,
)
from pandas._typing import (
    ArrayLike,
Reported by Pylint.
Line: 7
Column: 1
import numpy as np
from pandas._libs import (
    lib,
    missing as libmissing,
)
from pandas._typing import (
    ArrayLike,
Reported by Pylint.
Line: 53
Column: 19
"""
def __repr__(self) -> str:
return f"{self.name}Dtype()"
@property
def _is_numeric(self) -> bool:
return True
Reported by Pylint.
Line: 121
Column: 24
raise ValueError(f"invalid dtype specified {dtype}") from err
if isinstance(values, FloatingArray):
values, mask = values._data, values._mask
if dtype is not None:
values = values.astype(dtype.numpy_dtype, copy=False)
if copy:
values = values.copy()
Reported by Pylint.
Line: 121
Column: 38
raise ValueError(f"invalid dtype specified {dtype}") from err
if isinstance(values, FloatingArray):
values, mask = values._data, values._mask
if dtype is not None:
values = values.astype(dtype.numpy_dtype, copy=False)
if copy:
values = values.copy()
Reported by Pylint.
Line: 171
Column: 3
        # safely cast
        # we copy as need to coerce here
        # TODO should this be a safe cast?
        if mask.any():
            values = values.copy()
            values[mask] = np.nan
        values = values.astype(dtype, copy=False)  # , casting="safe")
    else:
Reported by Pylint.
Line: 246
Column: 5
    _internal_fill_value = 0.0
    @cache_readonly
    def dtype(self) -> FloatingDtype:
        return FLOAT_STR_TO_DTYPE[str(self._data.dtype)]
    def __init__(self, values: np.ndarray, mask: np.ndarray, copy: bool = False):
        if not (isinstance(values, np.ndarray) and values.dtype.kind == "f"):
            raise TypeError(
Reported by Pylint.
Line: 271
Column: 5
        scalars = to_numeric(strings, errors="raise")
        return cls._from_sequence(scalars, dtype=dtype, copy=copy)
    def _coerce_to_array(self, value) -> tuple[np.ndarray, np.ndarray]:
        return coerce_to_array(value, dtype=self.dtype)
    def astype(self, dtype, copy: bool = True) -> ArrayLike:
        """
        Cast to a NumPy array or ExtensionArray with 'dtype'.
Reported by Pylint.
Line: 332
Column: 27
        mask = None
        if isinstance(other, (BooleanArray, IntegerArray, FloatingArray)):
            other, mask = other._data, other._mask
        elif is_list_like(other):
            other = np.asarray(other)
            if other.ndim > 1:
                raise NotImplementedError("can only perform ops with 1-d structures")
Reported by Pylint.
Line: 332
Column: 40
        mask = None
        if isinstance(other, (BooleanArray, IntegerArray, FloatingArray)):
            other, mask = other._data, other._mask
        elif is_list_like(other):
            other = np.asarray(other)
            if other.ndim > 1:
                raise NotImplementedError("can only perform ops with 1-d structures")
Reported by Pylint.
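The private _data/_mask accesses flagged above belong to pandas' masked FloatingArray, which user code normally reaches through the public constructors. A minimal standalone sketch (illustration only, not taken from the module under review):
import pandas as pd
arr = pd.array([0.1, None, 0.3], dtype="Float64")  # FloatingArray: float values plus a boolean NA mask
arr.dtype    # Float64Dtype()
arr.isna()   # array([False,  True, False])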
asv_bench/benchmarks/boolean.py
22 issues
Line: 3
Column: 1
import numpy as np
import pandas as pd
class TimeLogicalOps:
    def setup(self):
        N = 10_000
        left, right, lmask, rmask = np.random.randint(0, 2, size=(4, N)).astype("bool")
Reported by Pylint.
Line: 10
Column: 9
    def setup(self):
        N = 10_000
        left, right, lmask, rmask = np.random.randint(0, 2, size=(4, N)).astype("bool")
        self.left = pd.arrays.BooleanArray(left, lmask)
        self.right = pd.arrays.BooleanArray(right, rmask)
    def time_or_scalar(self):
        self.left | True
        self.left | False
Reported by Pylint.
Line: 11
Column: 9
        N = 10_000
        left, right, lmask, rmask = np.random.randint(0, 2, size=(4, N)).astype("bool")
        self.left = pd.arrays.BooleanArray(left, lmask)
        self.right = pd.arrays.BooleanArray(right, rmask)
    def time_or_scalar(self):
        self.left | True
        self.left | False
Reported by Pylint.
Line: 14
Column: 9
        self.right = pd.arrays.BooleanArray(right, rmask)
    def time_or_scalar(self):
        self.left | True
        self.left | False
    def time_or_array(self):
        self.left | self.right
Reported by Pylint.
Line: 15
Column: 9
    def time_or_scalar(self):
        self.left | True
        self.left | False
    def time_or_array(self):
        self.left | self.right
    def time_and_scalar(self):
Reported by Pylint.
Line: 18
Column: 9
        self.left | False
    def time_or_array(self):
        self.left | self.right
    def time_and_scalar(self):
        self.left & True
        self.left & False
Reported by Pylint.
Line: 21
Column: 9
        self.left | self.right
    def time_and_scalar(self):
        self.left & True
        self.left & False
    def time_and_array(self):
        self.left & self.right
Reported by Pylint.
Line: 22
Column: 9
    def time_and_scalar(self):
        self.left & True
        self.left & False
    def time_and_array(self):
        self.left & self.right
    def time_xor_scalar(self):
Reported by Pylint.
Line: 25
Column: 9
        self.left & False
    def time_and_array(self):
        self.left & self.right
    def time_xor_scalar(self):
        self.left ^ True
        self.left ^ False
Reported by Pylint.
Line: 28
Column: 9
        self.left & self.right
    def time_xor_scalar(self):
        self.left ^ True
        self.left ^ False
    def time_xor_array(self):
        self.left ^ self.right
Reported by Pylint.