The following issues were found:
asv_bench/benchmarks/tslibs/period.py
38 issues
Line: 8
Column: 1
import numpy as np
from pandas._libs.tslibs.period import (
Period,
periodarr_to_dt64arr,
)
from pandas.tseries.frequencies import to_offset
Reported by Pylint.
Line: 13
Column: 1
periodarr_to_dt64arr,
)
from pandas.tseries.frequencies import to_offset
from .tslib import (
_sizes,
_tzs,
tzlocal_obj,
Reported by Pylint.
Line: 15
Column: 1
from pandas.tseries.frequencies import to_offset
from .tslib import (
_sizes,
_tzs,
tzlocal_obj,
)
Reported by Pylint.
Line: 51
Column: 27
)
param_names = ["freq", "attr"]
def setup(self, freq, attr):
self.per = Period("2012-06-01", freq=freq)
def time_property(self, freq, attr):
getattr(self.per, attr)
Reported by Pylint.
Line: 52
Column: 9
param_names = ["freq", "attr"]
def setup(self, freq, attr):
self.per = Period("2012-06-01", freq=freq)
def time_property(self, freq, attr):
getattr(self.per, attr)
Reported by Pylint.
Line: 54
Column: 29
def setup(self, freq, attr):
self.per = Period("2012-06-01", freq=freq)
def time_property(self, freq, attr):
getattr(self.per, attr)
class PeriodUnaryMethods:
Reported by Pylint.
Line: 64
Column: 9
param_names = ["freq"]
def setup(self, freq):
self.per = Period("2012-06-01", freq=freq)
def time_to_timestamp(self, freq):
self.per.to_timestamp()
def time_now(self, freq):
Reported by Pylint.
Line: 66
Column: 33
def setup(self, freq):
self.per = Period("2012-06-01", freq=freq)
def time_to_timestamp(self, freq):
self.per.to_timestamp()
def time_now(self, freq):
self.per.now(freq)
Reported by Pylint.
Line: 72
Column: 27
def time_now(self, freq):
self.per.now(freq)
def time_asfreq(self, freq):
self.per.asfreq("A")
class PeriodConstructor:
params = [["D"], [True, False]]
Reported by Pylint.
Line: 82
Column: 13
def setup(self, freq, is_offset):
if is_offset:
self.freq = to_offset(freq)
else:
self.freq = freq
def time_period_constructor(self, freq, is_offset):
Period("2012-06-01", freq=freq)
Reported by Pylint.
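Note on the entries above: the flagged positions fall on the setup methods, the benchmark parameters, and attributes such as self.per that are created inside setup. That is the standard ASV benchmark idiom, and it routinely trips pylint checks such as attribute-defined-outside-init and unused-argument (an assumption here, since the report omits the message names). A minimal sketch of the pattern, with hypothetical parameter values:

    # Hypothetical ASV-style benchmark reproducing the flagged idiom.
    # Assumed warnings: attribute-defined-outside-init (self.per is set in
    # setup, not __init__) and unused-argument (freq/attr exist only so ASV
    # can map them onto `params`); the report does not name the messages.
    from pandas import Period

    class PeriodProperties:
        params = [["D", "M"], ["year", "month"]]
        param_names = ["freq", "attr"]

        def setup(self, freq, attr):
            # ASV calls setup() before each timed run, so state lives here.
            self.per = Period("2012-06-01", freq=freq)

        def time_property(self, freq, attr):
            getattr(self.per, attr)

If these checks are considered noise for benchmark code, they are usually silenced per-directory in the pylint configuration rather than by restructuring the classes; the same idiom appears again in asv_bench/benchmarks/io/style.py further down.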
pandas/core/indexes/extension.py
38 issues
Line: 72
Column: 32
if cache:
def cached(self):
return getattr(self._data, name)
cached.__name__ = name
cached.__doc__ = attr.__doc__
method = cache_readonly(cached)
Reported by Pylint.
Line: 81
Column: 34
else:
def fget(self):
result = getattr(self._data, name)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
Reported by Pylint.
Line: 83
Column: 48
def fget(self):
result = getattr(self._data, name)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
Reported by Pylint.
Line: 84
Column: 32
result = getattr(self._data, name)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
Reported by Pylint.
Line: 91
Column: 25
return result
def fset(self, value):
setattr(self._data, name, value)
fget.__name__ = name
fget.__doc__ = attr.__doc__
method = property(fget, fset)
Reported by Pylint.
Line: 107
Column: 27
def method(self, *args, **kwargs):
if "inplace" in kwargs:
raise ValueError(f"cannot use inplace with {type(self).__name__}")
result = attr(self._data, *args, **kwargs)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
Reported by Pylint.
Line: 109
Column: 44
raise ValueError(f"cannot use inplace with {type(self).__name__}")
result = attr(self._data, *args, **kwargs)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
Reported by Pylint.
Line: 110
Column: 28
result = attr(self._data, *args, **kwargs)
if wrap:
if isinstance(result, type(self._data)):
return type(self)._simple_new(result, name=self.name)
elif isinstance(result, ABCDataFrame):
return result.set_index(self)
return Index(result, name=self.name)
return result
Reported by Pylint.
Line: 153
Column: 21
if isinstance(other, ABCSeries):
# the arrays defer to Series for comparison ops but the indexes
# don't, so we have to unwrap here.
other = other._values
other = _maybe_unwrap_index(other)
op = getattr(self._data, opname)
return op(other)
Reported by Pylint.
Line: 157
Column: 22
other = _maybe_unwrap_index(other)
op = getattr(self._data, opname)
return op(other)
wrapper.__name__ = opname
return wrapper
Reported by Pylint.
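Note on the extension.py entries: every flagged column lands inside dynamically built closures that forward an attribute or method of self._data onto the Index, set __name__/__doc__ on the wrapper, and re-wrap array results with type(self)._simple_new or Index. Pylint tends to object to the protected-member access and to attributes it cannot resolve statically (again an assumption; the messages are not shown). A stripped-down sketch of that delegation pattern, with hypothetical names:

    # Minimal sketch of the delegation the snippets show; not the pandas
    # implementation (the real one also handles DataFrame results, caching
    # via cache_readonly, and the `inplace` guard visible above).
    def _make_delegate(name: str):
        def fget(self):
            result = getattr(self._data, name)  # the protected access pylint flags
            if isinstance(result, type(self._data)):
                return type(self)._simple_new(result, name=self.name)
            return result

        fget.__name__ = name
        return property(fget)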
pandas/tests/reshape/concat/test_categorical.py
38 issues
Line: 1
Column: 1
import numpy as np
from pandas.core.dtypes.dtypes import CategoricalDtype
import pandas as pd
from pandas import (
Categorical,
DataFrame,
Series,
Reported by Pylint.
Line: 14
Column: 1
import pandas._testing as tm
class TestCategoricalConcat:
def test_categorical_concat(self, sort):
# See GH 10177
df1 = DataFrame(
np.arange(18, dtype="int64").reshape(6, 3), columns=["a", "b", "c"]
)
Reported by Pylint.
Line: 15
Column: 5
class TestCategoricalConcat:
def test_categorical_concat(self, sort):
# See GH 10177
df1 = DataFrame(
np.arange(18, dtype="int64").reshape(6, 3), columns=["a", "b", "c"]
)
Reported by Pylint.
Line: 15
Column: 5
class TestCategoricalConcat:
def test_categorical_concat(self, sort):
# See GH 10177
df1 = DataFrame(
np.arange(18, dtype="int64").reshape(6, 3), columns=["a", "b", "c"]
)
Reported by Pylint.
Line: 52
Column: 5
exp["h"] = exp["h"].astype(df2["h"].dtype)
tm.assert_frame_equal(res, exp)
def test_categorical_concat_dtypes(self):
# GH8143
index = ["cat", "obj", "num"]
cat = Categorical(["a", "b", "c"])
obj = Series(["a", "b", "c"])
Reported by Pylint.
Line: 52
Column: 5
exp["h"] = exp["h"].astype(df2["h"].dtype)
tm.assert_frame_equal(res, exp)
def test_categorical_concat_dtypes(self):
# GH8143
index = ["cat", "obj", "num"]
cat = Categorical(["a", "b", "c"])
obj = Series(["a", "b", "c"])
Reported by Pylint.
Line: 59
Column: 9
cat = Categorical(["a", "b", "c"])
obj = Series(["a", "b", "c"])
num = Series([1, 2, 3])
df = pd.concat([Series(cat), obj, num], axis=1, keys=index)
result = df.dtypes == "object"
expected = Series([False, True, False], index=index)
tm.assert_series_equal(result, expected)
Reported by Pylint.
Line: 73
Column: 5
expected = Series([True, False, False], index=index)
tm.assert_series_equal(result, expected)
def test_concat_categoricalindex(self):
# GH 16111, categories that aren't lexsorted
categories = [9, 0, 1, 2, 3]
a = Series(1, index=pd.CategoricalIndex([9, 0], categories=categories))
b = Series(2, index=pd.CategoricalIndex([0, 1], categories=categories))
Reported by Pylint.
Line: 73
Column: 5
expected = Series([True, False, False], index=index)
tm.assert_series_equal(result, expected)
def test_concat_categoricalindex(self):
# GH 16111, categories that aren't lexsorted
categories = [9, 0, 1, 2, 3]
a = Series(1, index=pd.CategoricalIndex([9, 0], categories=categories))
b = Series(2, index=pd.CategoricalIndex([0, 1], categories=categories))
Reported by Pylint.
Line: 77
Column: 9
# GH 16111, categories that aren't lexsorted
categories = [9, 0, 1, 2, 3]
a = Series(1, index=pd.CategoricalIndex([9, 0], categories=categories))
b = Series(2, index=pd.CategoricalIndex([0, 1], categories=categories))
c = Series(3, index=pd.CategoricalIndex([1, 2], categories=categories))
result = pd.concat([a, b, c], axis=1)
Reported by Pylint.
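Note on the test_categorical.py entries: the positions point at the module header, the test class, the test functions, and short locals such as df and num, which is the usual footprint of pylint's docstring and naming checks on a test module (an assumption; the message names are not included). A hypothetical illustration of the convention those checks ask for:

    """Tests for concat() with categorical data (illustrative module docstring)."""
    # Assumed checks: missing module/class/function docstrings; nothing in the
    # report confirms the exact messages.

    class TestCategoricalConcat:
        """Concat behaviour with Categorical inputs."""

        def test_categorical_concat(self, sort):
            """Each public module, class and test gets a one-line docstring."""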
pandas/tests/indexes/datetimelike_/test_sort_values.py
38 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
DatetimeIndex,
Index,
NaT,
PeriodIndex,
TimedeltaIndex,
Reported by Pylint.
Line: 315
Column: 5
# GH#35922. sort_values is stable both for normal and datetime-like Index
pidx = PeriodIndex(["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="A")
iidx = Index([2011, 2013, 2015, 2012, 2011], name="idx")
ordered1, indexer1 = pidx.sort_values(return_indexer=True, ascending=False)
ordered2, indexer2 = iidx.sort_values(return_indexer=True, ascending=False)
tm.assert_numpy_array_equal(indexer1, indexer2)
Reported by Pylint.
Line: 316
Column: 5
pidx = PeriodIndex(["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="A")
iidx = Index([2011, 2013, 2015, 2012, 2011], name="idx")
ordered1, indexer1 = pidx.sort_values(return_indexer=True, ascending=False)
ordered2, indexer2 = iidx.sort_values(return_indexer=True, ascending=False)
tm.assert_numpy_array_equal(indexer1, indexer2)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
DatetimeIndex,
Index,
NaT,
PeriodIndex,
TimedeltaIndex,
Reported by Pylint.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
period_range/date_range/timedelta_range.
"""
if isinstance(ordered, PeriodIndex):
assert ordered.freq == orig.freq
elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
if ascending:
assert ordered.freq.n == orig.freq.n
else:
assert ordered.freq.n == -1 * orig.freq.n
Reported by Bandit.
Line: 25
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
assert ordered.freq == orig.freq
elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
if ascending:
assert ordered.freq.n == orig.freq.n
else:
assert ordered.freq.n == -1 * orig.freq.n
def check_freq_nonmonotonic(ordered, orig):
Reported by Bandit.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
if ascending:
assert ordered.freq.n == orig.freq.n
else:
assert ordered.freq.n == -1 * orig.freq.n
def check_freq_nonmonotonic(ordered, orig):
"""
Check the expected freq on a PeriodIndex/DatetimeIndex/TimedeltaIndex
Reported by Bandit.
Line: 37
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
period_range/date_range//timedelta_range.
"""
if isinstance(ordered, PeriodIndex):
assert ordered.freq == orig.freq
elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
assert ordered.freq is None
class TestSortValues:
Reported by Bandit.
Line: 39
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
if isinstance(ordered, PeriodIndex):
assert ordered.freq == orig.freq
elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
assert ordered.freq is None
class TestSortValues:
@pytest.fixture(params=[DatetimeIndex, TimedeltaIndex, PeriodIndex])
def non_monotonic_idx(self, request):
Reported by Bandit.
Line: 42
Column: 1
assert ordered.freq is None
class TestSortValues:
@pytest.fixture(params=[DatetimeIndex, TimedeltaIndex, PeriodIndex])
def non_monotonic_idx(self, request):
if request.param is DatetimeIndex:
return DatetimeIndex(["2000-01-04", "2000-01-01", "2000-01-02"])
elif request.param is PeriodIndex:
Reported by Pylint.
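Note on the Bandit findings above: all of them link to B101 (assert_used) and sit on the bare assert statements in the freq-checking helpers. Bandit reports assert because assertions are compiled away when Python runs with -O, so they are not a reliable runtime guard; in a pytest suite they are the intended idiom, and projects commonly exclude test paths from B101 (an assumption about this project's configuration, not something the report states). A minimal, runnable illustration:

    # Why B101 fires: under `python -O` the assert below is stripped, so the
    # freq check would silently disappear. In pytest-based tests the assert is
    # deliberate, which is why B101 is often skipped for test directories.
    from pandas import PeriodIndex, period_range

    def check_freq(ordered, orig):
        if isinstance(ordered, PeriodIndex):
            assert ordered.freq == orig.freq

    pi = period_range("2011", periods=3, freq="A")
    check_freq(pi.sort_values(), pi)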
pandas/tests/indexes/period/test_partial_slicing.py
38 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
DataFrame,
Series,
date_range,
period_range,
)
Reported by Pylint.
Line: 129
Column: 14
def test_partial_slice_doesnt_require_monotonicity(self):
# See also: DatetimeIndex test ofm the same name
dti = date_range("2014-01-01", periods=30, freq="30D")
pi = dti.to_period("D")
ser_montonic = Series(np.arange(30), index=pi)
shuffler = list(range(0, 30, 2)) + list(range(1, 31, 2))
ser = ser_montonic[shuffler]
Reported by Pylint.
Line: 129
Column: 14
def test_partial_slice_doesnt_require_monotonicity(self):
# See also: DatetimeIndex test ofm the same name
dti = date_range("2014-01-01", periods=30, freq="30D")
pi = dti.to_period("D")
ser_montonic = Series(np.arange(30), index=pi)
shuffler = list(range(0, 30, 2)) + list(range(1, 31, 2))
ser = ser_montonic[shuffler]
Reported by Pylint.
Line: 40
Column: 17
]
for v in values:
with pytest.raises(TypeError, match=msg):
idx[v:]
s = Series(np.random.rand(len(idx)), index=idx)
tm.assert_series_equal(s["2013/01/02":], s[1:])
tm.assert_series_equal(s["2013/01/02":"2013/01/05"], s[1:5])
Reported by Pylint.
Line: 52
Column: 17
invalid = ["2013/02/01 9H", "2013/02/01 09:00"]
for v in invalid:
with pytest.raises(TypeError, match=msg):
idx[v:]
@pytest.mark.parametrize("make_range", [date_range, period_range])
def test_range_slice_seconds(self, make_range):
# GH#6716
idx = make_range(start="2013/01/01 09:00:00", freq="S", periods=4000)
Reported by Pylint.
Line: 70
Column: 17
]
for v in values:
with pytest.raises(TypeError, match=msg):
idx[v:]
s = Series(np.random.rand(len(idx)), index=idx)
tm.assert_series_equal(s["2013/01/01 09:05":"2013/01/01 09:10"], s[300:660])
tm.assert_series_equal(s["2013/01/01 10:00":"2013/01/01 10:05"], s[3600:3960])
Reported by Pylint.
Line: 113
Column: 13
# Check the lower-level calls are raising where expected.
with pytest.raises(TypeError, match=msg):
idx._maybe_cast_slice_bound("foo", "left")
with pytest.raises(TypeError, match=msg):
idx.get_slice_bound("foo", "left")
with pytest.raises(TypeError, match=msg):
obj["2013/09/30":"foo"]
Reported by Pylint.
Line: 118
Column: 13
idx.get_slice_bound("foo", "left")
with pytest.raises(TypeError, match=msg):
obj["2013/09/30":"foo"]
with pytest.raises(TypeError, match=msg):
obj["foo":"2013/09/30"]
with pytest.raises(TypeError, match=msg):
obj.loc["2013/09/30":"foo"]
with pytest.raises(TypeError, match=msg):
Reported by Pylint.
Line: 120
Column: 13
with pytest.raises(TypeError, match=msg):
obj["2013/09/30":"foo"]
with pytest.raises(TypeError, match=msg):
obj["foo":"2013/09/30"]
with pytest.raises(TypeError, match=msg):
obj.loc["2013/09/30":"foo"]
with pytest.raises(TypeError, match=msg):
obj.loc["foo":"2013/09/30"]
Reported by Pylint.
Line: 122
Column: 13
with pytest.raises(TypeError, match=msg):
obj["foo":"2013/09/30"]
with pytest.raises(TypeError, match=msg):
obj.loc["2013/09/30":"foo"]
with pytest.raises(TypeError, match=msg):
obj.loc["foo":"2013/09/30"]
def test_partial_slice_doesnt_require_monotonicity(self):
# See also: DatetimeIndex test ofm the same name
Reported by Pylint.
pandas/tests/indexes/multi/test_compat.py
38 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import MultiIndex
import pandas._testing as tm
def test_numeric_compat(idx):
with pytest.raises(TypeError, match="cannot perform __mul__"):
Reported by Pylint.
Line: 50
Column: 12
assert "_values" not in mi1._cache
assert "_values" not in mi2._cache
vals = mi1.values.copy()
vals2 = mi2.values.copy()
# accessing .values should cache ._values
assert mi1._values is mi1._cache["_values"]
assert mi1.values is mi1._cache["_values"]
Reported by Pylint.
Line: 51
Column: 13
assert "_values" not in mi2._cache
vals = mi1.values.copy()
vals2 = mi2.values.copy()
# accessing .values should cache ._values
assert mi1._values is mi1._cache["_values"]
assert mi1.values is mi1._cache["_values"]
assert isinstance(mi1._cache["_values"], np.ndarray)
Reported by Pylint.
Line: 10
Column: 9
def test_numeric_compat(idx):
with pytest.raises(TypeError, match="cannot perform __mul__"):
idx * 1
with pytest.raises(TypeError, match="cannot perform __rmul__"):
1 * idx
div_err = "cannot perform __truediv__"
Reported by Pylint.
Line: 13
Column: 9
idx * 1
with pytest.raises(TypeError, match="cannot perform __rmul__"):
1 * idx
div_err = "cannot perform __truediv__"
with pytest.raises(TypeError, match=div_err):
idx / 1
Reported by Pylint.
Line: 17
Column: 9
div_err = "cannot perform __truediv__"
with pytest.raises(TypeError, match=div_err):
idx / 1
div_err = div_err.replace(" __", " __r")
with pytest.raises(TypeError, match=div_err):
1 / idx
Reported by Pylint.
Line: 21
Column: 9
div_err = div_err.replace(" __", " __r")
with pytest.raises(TypeError, match=div_err):
1 / idx
with pytest.raises(TypeError, match="cannot perform __floordiv__"):
idx // 1
with pytest.raises(TypeError, match="cannot perform __rfloordiv__"):
Reported by Pylint.
Line: 24
Column: 9
1 / idx
with pytest.raises(TypeError, match="cannot perform __floordiv__"):
idx // 1
with pytest.raises(TypeError, match="cannot perform __rfloordiv__"):
1 // idx
Reported by Pylint.
Line: 27
Column: 9
idx // 1
with pytest.raises(TypeError, match="cannot perform __rfloordiv__"):
1 // idx
@pytest.mark.parametrize("method", ["all", "any"])
def test_logical_compat(idx, method):
msg = f"cannot perform {method}"
Reported by Pylint.
Line: 47
Column: 29
mi2 = MultiIndex(levels=levels2, codes=codes)
# instantiating MultiIndex should not access/cache _.values
assert "_values" not in mi1._cache
assert "_values" not in mi2._cache
vals = mi1.values.copy()
vals2 = mi2.values.copy()
Reported by Pylint.
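Note on the test_partial_slicing.py and test_compat.py entries: the flagged columns sit on statements such as idx[v:], obj["foo":"2013/09/30"] and idx * 1 inside with pytest.raises(...) blocks. These expressions exist only for the exception they raise, which pylint typically reports as a statement with no effect (pointless-statement; assumed, since the message is not shown). A hypothetical reduction of the pattern:

    import pytest
    from pandas import MultiIndex

    def test_mul_raises():
        idx = MultiIndex.from_tuples([("a", 1), ("b", 2)])
        with pytest.raises(TypeError, match="cannot perform __mul__"):
            idx * 1  # evaluated only for its TypeError; pylint sees no effect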
pandas/tests/series/test_cumulative.py
38 issues
Line: 11
Column: 1
from itertools import product
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
Reported by Pylint.
Line: 50
Column: 24
result = ts.cummin()[1::2]
expected = np.minimum.accumulate(ts.dropna())
result.index = result.index._with_freq(None)
tm.assert_series_equal(result, expected)
def test_cummax(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummax().values,
Reported by Pylint.
Line: 63
Column: 24
result = ts.cummax()[1::2]
expected = np.maximum.accumulate(ts.dropna())
result.index = result.index._with_freq(None)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("tz", [None, "US/Pacific"])
def test_cummin_datetime64(self, tz):
s = pd.Series(
Reported by Pylint.
Line: 24
Column: 5
)
# with missing values
ts = series.copy()
ts[::2] = np.NaN
result = func(ts)[1::2]
expected = func(np.array(ts.dropna()))
Reported by Pylint.
Line: 33
Column: 1
tm.assert_numpy_array_equal(result.values, expected, check_dtype=False)
class TestSeriesCumulativeOps:
def test_cumsum(self, datetime_series):
_check_accum_op("cumsum", datetime_series)
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
Reported by Pylint.
Line: 34
Column: 5
class TestSeriesCumulativeOps:
def test_cumsum(self, datetime_series):
_check_accum_op("cumsum", datetime_series)
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
Reported by Pylint.
Line: 34
Column: 5
class TestSeriesCumulativeOps:
def test_cumsum(self, datetime_series):
_check_accum_op("cumsum", datetime_series)
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
Reported by Pylint.
Line: 37
Column: 5
def test_cumsum(self, datetime_series):
_check_accum_op("cumsum", datetime_series)
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
def test_cummin(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummin().values,
Reported by Pylint.
Line: 37
Column: 5
def test_cumsum(self, datetime_series):
_check_accum_op("cumsum", datetime_series)
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
def test_cummin(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummin().values,
Reported by Pylint.
Line: 40
Column: 5
def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)
def test_cummin(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummin().values,
np.minimum.accumulate(np.array(datetime_series)),
)
ts = datetime_series.copy()
Reported by Pylint.
pandas/tests/indexes/timedeltas/test_ops.py
38 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
Series,
TimedeltaIndex,
timedelta_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 47
Column: 16
idx = timedelta_range("1", freq=freq_sample, periods=10)
result = TimedeltaIndex(idx.asi8, freq="infer")
tm.assert_index_equal(idx, result)
assert result.freq == freq_sample
@pytest.mark.parametrize("values", [["0 days", "2 days", "4 days"], []])
@pytest.mark.parametrize("freq", ["2D", Day(2), "48H", Hour(48)])
def test_freq_setter(self, values, freq):
# GH 20678
Reported by Pylint.
Line: 57
Column: 16
# can set to an offset, converting from string if necessary
idx._data.freq = freq
assert idx.freq == freq
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
assert idx.freq is None
Reported by Pylint.
Line: 58
Column: 27
# can set to an offset, converting from string if necessary
idx._data.freq = freq
assert idx.freq == freq
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
assert idx.freq is None
Reported by Pylint.
Line: 62
Column: 16
# can reset to None
idx._data.freq = None
assert idx.freq is None
def test_freq_setter_errors(self):
# GH 20678
idx = TimedeltaIndex(["0 days", "2 days", "4 days"])
Reported by Pylint.
Line: 96
Column: 16
assert tdi2.freq is None
# Original was not altered
assert tdi.freq == "2D"
assert tda.freq == "2D"
Reported by Pylint.
Line: 40
Column: 13
assert "foo" not in ts.__dict__.keys()
msg = "'Series' object has no attribute 'foo'"
with pytest.raises(AttributeError, match=msg):
ts.foo
def test_infer_freq(self, freq_sample):
# GH#11018
idx = timedelta_range("1", freq=freq_sample, periods=10)
result = TimedeltaIndex(idx.asi8, freq="infer")
Reported by Pylint.
Line: 56
Column: 9
idx = TimedeltaIndex(values)
# can set to an offset, converting from string if necessary
idx._data.freq = freq
assert idx.freq == freq
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
Reported by Pylint.
Line: 61
Column: 9
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
assert idx.freq is None
def test_freq_setter_errors(self):
# GH 20678
idx = TimedeltaIndex(["0 days", "2 days", "4 days"])
Reported by Pylint.
Line: 74
Column: 13
"passed frequency 5D"
)
with pytest.raises(ValueError, match=msg):
idx._data.freq = "5D"
# setting with a non-fixed frequency
msg = r"<2 \* BusinessDays> is a non-fixed frequency"
with pytest.raises(ValueError, match=msg):
idx._data.freq = "2B"
Reported by Pylint.
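Note on the MultiIndex, cumulative-ops and TimedeltaIndex test entries: many of the remaining positions land on private attributes the tests reach into on purpose, for example mi1._cache, result.index._with_freq(None) and idx._data.freq. Pylint normally reports that as access to a protected member (assumed; not named in the report), and since these tests exist precisely to exercise internal state, the usual remedy is a targeted inline disable rather than a code change. A minimal sketch:

    from pandas import TimedeltaIndex

    idx = TimedeltaIndex(["0 days", "2 days", "4 days"])
    # Intentional poke at internal state, as in the test above; the inline
    # pragma keeps pylint quiet for just this line.
    idx._data.freq = "2D"  # pylint: disable=protected-access
    assert idx.freq == "2D"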
asv_bench/benchmarks/io/style.py
37 issues
Line: 3
Column: 1
import numpy as np
from pandas import (
DataFrame,
IndexSlice,
)
class Render:
Reported by Pylint.
Line: 15
Column: 9
param_names = ["cols", "rows"]
def setup(self, cols, rows):
self.df = DataFrame(
np.random.randn(rows, cols),
columns=[f"float_{i+1}" for i in range(cols)],
index=[f"row_{i+1}" for i in range(rows)],
)
Reported by Pylint.
Line: 21
Column: 39
index=[f"row_{i+1}" for i in range(rows)],
)
def time_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
Reported by Pylint.
Line: 21
Column: 33
index=[f"row_{i+1}" for i in range(rows)],
)
def time_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
Reported by Pylint.
Line: 23
Column: 9
def time_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
Reported by Pylint.
Line: 25
Column: 36
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
Reported by Pylint.
Line: 25
Column: 42
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
Reported by Pylint.
Line: 27
Column: 9
def peakmem_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
self.st._render_html(True, True)
Reported by Pylint.
Line: 29
Column: 35
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
self.st._render_html(True, True)
def peakmem_classes_render(self, cols, rows):
self._style_classes()
Reported by Pylint.
Line: 29
Column: 41
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
self.st._render_html(True, True)
def peakmem_classes_render(self, cols, rows):
self._style_classes()
Reported by Pylint.
pandas/core/arrays/base.py
37 issues
Line: 25
Column: 1
import numpy as np
from pandas._libs import lib
from pandas._typing import (
ArrayLike,
Dtype,
FillnaOptions,
PositionalIndexer,
Reported by Pylint.
Line: 77
Column: 5
if TYPE_CHECKING:
class ExtensionArraySupportsAnyAll("ExtensionArray"):
def any(self, *, skipna: bool = True) -> bool:
pass
def all(self, *, skipna: bool = True) -> bool:
pass
Reported by Pylint.
Line: 544
Column: 3
else:
return self.copy()
# FIXME: Really hard-code here?
if isinstance(dtype, StringDtype):
# allow conversion to StringArrays
return dtype.construct_array_type()._from_sequence(self, copy=False)
return np.array(self, dtype=dtype, copy=copy)
Reported by Pylint.
Line: 547
Column: 20
# FIXME: Really hard-code here?
if isinstance(dtype, StringDtype):
# allow conversion to StringArrays
return dtype.construct_array_type()._from_sequence(self, copy=False)
return np.array(self, dtype=dtype, copy=copy)
def isna(self) -> np.ndarray | ExtensionArraySupportsAnyAll:
"""
Reported by Pylint.
Line: 590
Column: 5
# Note: this is used in `ExtensionArray.argsort`.
return np.array(self)
def argsort(
self,
ascending: bool = True,
kind: str = "quicksort",
na_position: str = "last",
*args,
Reported by Pylint.
Line: 1210
Column: 1
# Reshaping
# ------------------------------------------------------------------------
def transpose(self, *axes: int) -> ExtensionArray:
"""
Return a transposed view on this array.
Because ExtensionArrays are always 1D, this is a no-op. It is included
for compatibility with np.ndarray.
Reported by Pylint.
Line: 1223
Column: 21
def T(self) -> ExtensionArray:
return self.transpose()
def ravel(self, order: Literal["C", "F", "A", "K"] | None = "C") -> ExtensionArray:
"""
Return a flattened view on this array.
Parameters
----------
Reported by Pylint.
Line: 1271
Column: 16
# of objects
@cache_readonly
def _can_hold_na(self) -> bool:
return self.dtype._can_hold_na
def _reduce(self, name: str, *, skipna: bool = True, **kwargs):
"""
Return a scalar result of performing the reduction operation.
Reported by Pylint.
Line: 1
Column: 1
"""
An interface for extending pandas with custom arrays.
.. warning::
This is an experimental API and subject to breaking changes
without warning.
"""
from __future__ import annotations
Reported by Pylint.
Line: 77
Column: 5
if TYPE_CHECKING:
class ExtensionArraySupportsAnyAll("ExtensionArray"):
def any(self, *, skipna: bool = True) -> bool:
pass
def all(self, *, skipna: bool = True) -> bool:
pass
Reported by Pylint.
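Note on the pandas/core/arrays/base.py entries: the item at line 544 sits on the "# FIXME: Really hard-code here?" comment, which matches the shape of pylint's fixme check, and the neighbouring ones fall on private calls such as dtype.construct_array_type()._from_sequence(...) (both assumptions; the report gives positions only). Which comment tags trigger the fixme check is controlled by pylint's notes option. A hypothetical illustration:

    def astype_sketch(values, dtype):
        # FIXME: illustrative tag -- a comment like this alone is enough for
        # pylint's fixme check; the tag list comes from the `notes` option.
        return values.astype(dtype)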