The following issues were found:
pandas/tests/io/test_fsspec.py
76 issues
Line: 4
Column: 1
import io
import numpy as np
import pytest
from pandas import (
DataFrame,
date_range,
read_csv,
Reported by Pylint.
Line: 41
Column: 5
def test_read_csv(cleared_fs):
from fsspec.implementations.memory import MemoryFile
cleared_fs.store["test/test.csv"] = MemoryFile(data=text)
df2 = read_csv("memory://test/test.csv", parse_dates=["dt"])
tm.assert_frame_equal(df1, df2)
Reported by Pylint.
Line: 50
Column: 5
def test_reasonable_error(monkeypatch, cleared_fs):
from fsspec import registry
from fsspec.registry import known_implementations
registry.target.clear()
with pytest.raises(ValueError, match="nosuchprotocol"):
read_csv("nosuchprotocol://test/test.csv")
Reported by Pylint.
Line: 51
Column: 5
def test_reasonable_error(monkeypatch, cleared_fs):
from fsspec import registry
from fsspec.registry import known_implementations
registry.target.clear()
with pytest.raises(ValueError, match="nosuchprotocol"):
read_csv("nosuchprotocol://test/test.csv")
err_msg = "test error message"
Reported by Pylint.
Line: 40
Column: 19
memfs.store.clear()
def test_read_csv(cleared_fs):
from fsspec.implementations.memory import MemoryFile
cleared_fs.store["test/test.csv"] = MemoryFile(data=text)
df2 = read_csv("memory://test/test.csv", parse_dates=["dt"])
Reported by Pylint.
Line: 49
Column: 40
tm.assert_frame_equal(df1, df2)
def test_reasonable_error(monkeypatch, cleared_fs):
from fsspec import registry
from fsspec.registry import known_implementations
registry.target.clear()
with pytest.raises(ValueError, match="nosuchprotocol"):
Reported by Pylint.
Line: 66
Column: 17
read_csv("couldexist://test/test.csv")
def test_to_csv(cleared_fs):
df1.to_csv("memory://test/test.csv", index=True)
df2 = read_csv("memory://test/test.csv", parse_dates=["dt"], index_col=0)
tm.assert_frame_equal(df1, df2)
Reported by Pylint.
Line: 75
Column: 19
@pytest.mark.parametrize("ext", ["xls", "xlsx"])
def test_to_excel(cleared_fs, ext):
if ext == "xls":
pytest.importorskip("xlwt")
else:
pytest.importorskip("openpyxl")
Reported by Pylint.
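Note: the excerpts above omit the Pylint message IDs, so the exact diagnostics are not recoverable from this report. If, for example, the column-5 findings inside the test functions are import-outside-toplevel and missing-docstring checks (an assumption, not confirmed by the report), they can be addressed or suppressed in place. A minimal, hypothetical sketch:

# Hypothetical sketch only; the message names below are assumptions, since the
# report does not show which Pylint diagnostics fired on these lines.


def test_read_csv(cleared_fs):
    """Round-trip a CSV through an in-memory fsspec filesystem."""
    # A one-line docstring like the one above clears a missing-function-docstring
    # finding. A deliberately local import can instead be suppressed where it occurs:
    from fsspec.implementations.memory import MemoryFile  # pylint: disable=import-outside-toplevel

    assert MemoryFile is not None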
pandas/tests/indexes/datetimes/test_ops.py
76 issues
Line: 4
Column: 1
from datetime import datetime
from dateutil.tz import tzlocal
import pytest
from pandas.compat import IS64
from pandas import (
DateOffset,
Reported by Pylint.
Line: 68
Column: 16
)
idx = date_range(start="2013-04-01", periods=30, freq=freq, tz=tz)
assert idx.resolution == expected
def test_infer_freq(self, freq_sample):
# GH 11018
idx = date_range("2011-01-01 09:00:00", freq=freq_sample, periods=10)
result = DatetimeIndex(idx.asi8, freq="infer")
Reported by Pylint.
Line: 75
Column: 16
idx = date_range("2011-01-01 09:00:00", freq=freq_sample, periods=10)
result = DatetimeIndex(idx.asi8, freq="infer")
tm.assert_index_equal(idx, result)
assert result.freq == freq_sample
@pytest.mark.parametrize("values", [["20180101", "20180103", "20180105"], []])
@pytest.mark.parametrize("freq", ["2D", Day(2), "2B", BDay(2), "48H", Hour(48)])
@pytest.mark.parametrize("tz", [None, "US/Eastern"])
def test_freq_setter(self, values, freq, tz):
Reported by Pylint.
Line: 86
Column: 16
# can set to an offset, converting from string if necessary
idx._data.freq = freq
assert idx.freq == freq
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
assert idx.freq is None
Reported by Pylint.
Line: 87
Column: 27
# can set to an offset, converting from string if necessary
idx._data.freq = freq
assert idx.freq == freq
assert isinstance(idx.freq, DateOffset)
# can reset to None
idx._data.freq = None
assert idx.freq is None
Reported by Pylint.
Line: 91
Column: 16
# can reset to None
idx._data.freq = None
assert idx.freq is None
def test_freq_setter_errors(self):
# GH 20678
idx = DatetimeIndex(["20180101", "20180103", "20180105"])
Reported by Pylint.
Line: 120
Column: 16
assert dti2.freq is None
# Original was not altered
assert dti.freq == "D"
assert dta.freq == "D"
class TestBusinessDatetimeIndex:
def setup_method(self, method):
Reported by Pylint.
Line: 44
Column: 13
assert s.day == 10
msg = "'Series' object has no attribute 'weekday'"
with pytest.raises(AttributeError, match=msg):
s.weekday
@pytest.mark.parametrize(
"freq,expected",
[
("A", "day"),
Reported by Pylint.
pandas/tests/test_take.py
76 issues
Line: 5
Column: 1
import re
import numpy as np
import pytest
from pandas._libs import iNaT
import pandas._testing as tm
import pandas.core.algorithms as algos
Reported by Pylint.
Line: 75
Column: 34
# Standard incompatible fill error.
fill_error = re.compile("Incompatible type for fill_value")
def test_1d_fill_nonna(self, dtype_fill_out_dtype):
dtype, fill_value, out_dtype = dtype_fill_out_dtype
data = np.random.randint(0, 2, 4).astype(dtype)
indexer = [2, 1, 0, -1]
result = algos.take_nd(data, indexer, fill_value=fill_value)
Reported by Pylint.
Line: 91
Column: 34
assert (result[[0, 1, 2, 3]] == data[indexer]).all()
assert result.dtype == dtype
def test_2d_fill_nonna(self, dtype_fill_out_dtype):
dtype, fill_value, out_dtype = dtype_fill_out_dtype
data = np.random.randint(0, 2, (5, 3)).astype(dtype)
indexer = [2, 1, 0, -1]
result = algos.take_nd(data, indexer, axis=0, fill_value=fill_value)
Reported by Pylint.
Line: 115
Column: 34
assert (result[:, [0, 1, 2, 3]] == data[:, indexer]).all()
assert result.dtype == dtype
def test_3d_fill_nonna(self, dtype_fill_out_dtype):
dtype, fill_value, out_dtype = dtype_fill_out_dtype
data = np.random.randint(0, 2, (5, 4, 3)).astype(dtype)
indexer = [2, 1, 0, -1]
Reported by Pylint.
Line: 1
Column: 1
from datetime import datetime
import re
import numpy as np
import pytest
from pandas._libs import iNaT
import pandas._testing as tm
Reported by Pylint.
Line: 14
Column: 1
@pytest.fixture(params=[True, False])
def writeable(request):
return request.param
# Check that take_nd works both with writeable arrays
# (in which case fast typed memory-views implementation)
Reported by Pylint.
Line: 35
Column: 1
(np.int8, False),
(np.object_, True),
(np.bool_, False),
]
)
def dtype_can_hold_na(request):
return request.param
Reported by Pylint.
Line: 65
Column: 1
(np.bool_, 3.0 + 4.0j, np.object_),
(np.bool_, True, np.bool_),
(np.bool_, "", np.object_),
]
)
def dtype_fill_out_dtype(request):
return request.param
Reported by Pylint.
Line: 71
Column: 1
return request.param
class TestTake:
# Standard incompatible fill error.
fill_error = re.compile("Incompatible type for fill_value")
def test_1d_fill_nonna(self, dtype_fill_out_dtype):
dtype, fill_value, out_dtype = dtype_fill_out_dtype
Reported by Pylint.
Line: 75
Column: 5
# Standard incompatible fill error.
fill_error = re.compile("Incompatible type for fill_value")
def test_1d_fill_nonna(self, dtype_fill_out_dtype):
dtype, fill_value, out_dtype = dtype_fill_out_dtype
data = np.random.randint(0, 2, 4).astype(dtype)
indexer = [2, 1, 0, -1]
result = algos.take_nd(data, indexer, fill_value=fill_value)
Reported by Pylint.
pandas/tests/tseries/offsets/test_custom_business_hour.py
75 issues
Line: 7
Column: 1
from datetime import datetime
import numpy as np
import pytest
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BusinessHour,
CustomBusinessHour,
Reported by Pylint.
Line: 10
Column: 1
import pytest
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BusinessHour,
CustomBusinessHour,
Nano,
)
Reported by Pylint.
Line: 29
Column: 28
_offset = CustomBusinessHour
holidays = ["2014-06-27", datetime(2014, 6, 30), np.datetime64("2014-07-02")]
def setup_method(self, method):
# 2014 Calendar to check custom holidays
# Sun Mon Tue Wed Thu Fri Sat
# 6/22 23 24 25 26 27 28
# 29 30 7/1 2 3 4 5
# 6 7 8 9 10 11 12
Reported by Pylint.
Line: 36
Column: 9
# 29 30 7/1 2 3 4 5
# 6 7 8 9 10 11 12
self.d = datetime(2014, 7, 1, 10, 00)
self.offset1 = CustomBusinessHour(weekmask="Tue Wed Thu Fri")
self.offset2 = CustomBusinessHour(holidays=self.holidays)
def test_constructor_errors(self):
from datetime import time as dt_time
Reported by Pylint.
Line: 38
Column: 9
self.d = datetime(2014, 7, 1, 10, 00)
self.offset1 = CustomBusinessHour(weekmask="Tue Wed Thu Fri")
self.offset2 = CustomBusinessHour(holidays=self.holidays)
def test_constructor_errors(self):
from datetime import time as dt_time
msg = "time data must be specified only with hour and minute"
Reported by Pylint.
Line: 25
Column: 1
from pandas.tseries.holiday import USFederalHolidayCalendar
class TestCustomBusinessHour(Base):
_offset = CustomBusinessHour
holidays = ["2014-06-27", datetime(2014, 6, 30), np.datetime64("2014-07-02")]
def setup_method(self, method):
# 2014 Calendar to check custom holidays
Reported by Pylint.
Line: 29
Column: 5
_offset = CustomBusinessHour
holidays = ["2014-06-27", datetime(2014, 6, 30), np.datetime64("2014-07-02")]
def setup_method(self, method):
# 2014 Calendar to check custom holidays
# Sun Mon Tue Wed Thu Fri Sat
# 6/22 23 24 25 26 27 28
# 29 30 7/1 2 3 4 5
# 6 7 8 9 10 11 12
Reported by Pylint.
Line: 35
Column: 9
# 6/22 23 24 25 26 27 28
# 29 30 7/1 2 3 4 5
# 6 7 8 9 10 11 12
self.d = datetime(2014, 7, 1, 10, 00)
self.offset1 = CustomBusinessHour(weekmask="Tue Wed Thu Fri")
self.offset2 = CustomBusinessHour(holidays=self.holidays)
def test_constructor_errors(self):
Reported by Pylint.
Line: 40
Column: 5
self.offset2 = CustomBusinessHour(holidays=self.holidays)
def test_constructor_errors(self):
from datetime import time as dt_time
msg = "time data must be specified only with hour and minute"
with pytest.raises(ValueError, match=msg):
CustomBusinessHour(start=dt_time(11, 0, 5))
Reported by Pylint.
pandas/tests/series/methods/test_shift.py
75 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas.errors import NullFrequencyError
import pandas as pd
from pandas import (
DatetimeIndex,
Index,
Reported by Pylint.
Line: 117
Column: 13
s2 = Series(date_range("2000-01-01 09:00:00", periods=5, tz="CET"), name="foo")
msg = "DatetimeArray subtraction must have the same timezones or no timezones"
with pytest.raises(TypeError, match=msg):
s - s2
def test_shift2(self):
ts = Series(
np.random.randn(5), index=date_range("1/1/2000", periods=5, freq="H")
)
Reported by Pylint.
Line: 205
Column: 3
@pytest.mark.filterwarnings("ignore:tshift is deprecated:FutureWarning")
def test_tshift(self, datetime_series):
# TODO: remove this test when tshift deprecation is enforced
# PeriodIndex
ps = tm.makePeriodSeries()
shifted = ps.tshift(1)
unshifted = shifted.tshift(-1)
Reported by Pylint.
Line: 238
Column: 26
)
shifted = inferred_ts.tshift(1)
expected = datetime_series.tshift(1)
expected.index = expected.index._with_freq(None)
tm.assert_series_equal(shifted, expected)
unshifted = shifted.tshift(-1)
tm.assert_series_equal(unshifted, inferred_ts)
Reported by Pylint.
Line: 280
Column: 26
)
shifted = inferred_ts.shift(1, freq="infer")
expected = datetime_series.shift(1, freq="infer")
expected.index = expected.index._with_freq(None)
tm.assert_series_equal(shifted, expected)
unshifted = shifted.shift(-1, freq="infer")
tm.assert_series_equal(unshifted, inferred_ts)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas.errors import NullFrequencyError
import pandas as pd
from pandas import (
DatetimeIndex,
Index,
Reported by Pylint.
Line: 21
Column: 1
from pandas.tseries.offsets import BDay
class TestShift:
@pytest.mark.parametrize(
"ser",
[
Series([np.arange(5)]),
date_range("1/1/2011", periods=24, freq="H"),
Reported by Pylint.
Line: 29
Column: 5
date_range("1/1/2011", periods=24, freq="H"),
Series(range(5), index=date_range("2017", periods=5)),
],
)
@pytest.mark.parametrize("shift_size", [0, 1, 2])
def test_shift_always_copy(self, ser, shift_size):
# GH22397
assert ser.shift(shift_size) is not ser
Reported by Pylint.
Line: 33
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
@pytest.mark.parametrize("shift_size", [0, 1, 2])
def test_shift_always_copy(self, ser, shift_size):
# GH22397
assert ser.shift(shift_size) is not ser
@pytest.mark.parametrize("move_by_freq", [pd.Timedelta("1D"), pd.Timedelta("1min")])
def test_datetime_shift_always_copy(self, move_by_freq):
# GH#22397
ser = Series(range(5), index=date_range("2017", periods=5))
Reported by Bandit.
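The Bandit entries here and later in this report point at the B101 assert_used check (see the linked suggestion). Plain assert statements are the normal pytest idiom, so findings like this one in test files are usually accepted or skipped in Bandit's configuration; B101 is aimed at library code, where asserts are stripped when Python runs with -O. A short sketch of that distinction, using illustrative names only:

# Illustrative names only; this is not code from pandas.


def coerce_shift_size(shift_size):
    # In library code, prefer an explicit check over assert: it still runs
    # when Python is invoked with -O, which removes assert statements.
    if not isinstance(shift_size, int):
        raise TypeError(f"shift_size must be an int, got {type(shift_size).__name__}")
    return shift_size


def test_coerce_shift_size():
    # In pytest tests, plain asserts are expected; B101 findings in test files
    # are typically skipped via Bandit configuration rather than rewritten.
    assert coerce_shift_size(2) == 2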
pandas/io/sas/sas7bdat.py
75 issues
Line: 44
Column: 1
)
from pandas.io.common import get_handle
from pandas.io.sas._sas import Parser
import pandas.io.sas.sas_constants as const
from pandas.io.sas.sasreader import ReaderBase
def _parse_datetime(sas_datetime: float, unit: str):
Reported by Pylint.
Line: 44
Column: 1
)
from pandas.io.common import get_handle
from pandas.io.sas._sas import Parser
import pandas.io.sas.sas_constants as const
from pandas.io.sas.sasreader import ReaderBase
def _parse_datetime(sas_datetime: float, unit: str):
Reported by Pylint.
Line: 106
Column: 3
col_id: int
name: str | bytes
label: str | bytes
format: str | bytes # TODO: i think allowing bytes is from py2 days
ctype: bytes
length: int
def __init__(
self,
Reported by Pylint.
Line: 115
Column: 9
col_id: int,
name: str | bytes,
label: str | bytes,
format: str | bytes,
ctype: bytes,
length: int,
):
self.col_id = col_id
self.name = name
Reported by Pylint.
Line: 426
Column: 9
def _read_page_header(self):
bit_offset = self._page_bit_offset
tx = const.page_type_offset + bit_offset
self._current_page_type = self._read_int(tx, const.page_type_length)
tx = const.block_count_offset + bit_offset
self._current_page_block_count = self._read_int(tx, const.block_count_length)
tx = const.subheader_count_offset + bit_offset
self._current_page_subheaders_count = self._read_int(
tx, const.subheader_count_length
Reported by Pylint.
Line: 428
Column: 9
tx = const.page_type_offset + bit_offset
self._current_page_type = self._read_int(tx, const.page_type_length)
tx = const.block_count_offset + bit_offset
self._current_page_block_count = self._read_int(tx, const.block_count_length)
tx = const.subheader_count_offset + bit_offset
self._current_page_subheaders_count = self._read_int(
tx, const.subheader_count_length
)
Reported by Pylint.
Line: 430
Column: 9
tx = const.block_count_offset + bit_offset
self._current_page_block_count = self._read_int(tx, const.block_count_length)
tx = const.subheader_count_offset + bit_offset
self._current_page_subheaders_count = self._read_int(
tx, const.subheader_count_length
)
def _process_page_metadata(self) -> None:
bit_offset = self._page_bit_offset
Reported by Pylint.
Line: 452
Column: 3
self._process_subheader(subheader_index, pointer)
def _get_subheader_index(self, signature: bytes, compression, ptype) -> int:
# TODO: return here could be made an enum
index = const.subheader_signature_to_index.get(signature)
if index is None:
f1 = (compression == const.compressed_subheader_id) or (compression == 0)
f2 = ptype == const.compressed_subheader_type
if (self.compression != b"") and f1 and f2:
Reported by Pylint.
Line: 522
Column: 55
processor(offset, length)
def _process_rowsize_subheader(self, offset: int, length: int) -> None:
int_len = self._int_length
lcs_offset = offset
lcp_offset = offset
if self.U64:
Reported by Pylint.
Line: 534
Column: 9
lcs_offset += 354
lcp_offset += 378
self.row_length = self._read_int(
offset + const.row_length_offset_multiplier * int_len, int_len
)
self.row_count = self._read_int(
offset + const.row_count_offset_multiplier * int_len, int_len
)
Reported by Pylint.
pandas/tests/series/test_api.py
74 issues
Line: 5
Column: 1
import pydoc
import numpy as np
import pytest
from pandas.util._test_decorators import skip_if_no
import pandas as pd
from pandas import (
Reported by Pylint.
Line: 123
Column: 16
s = Series([1, 2, np.nan])
tm.assert_series_equal(s.dropna(axis="rows"), s.dropna(axis="index"))
assert s.dropna().sum("rows") == 3
assert s._get_axis_number("rows") == 0
assert s._get_axis_name("rows") == "index"
def test_class_axis(self):
# https://github.com/pandas-dev/pandas/issues/18147
# no exception and no empty docstring
Reported by Pylint.
Line: 124
Column: 16
tm.assert_series_equal(s.dropna(axis="rows"), s.dropna(axis="index"))
assert s.dropna().sum("rows") == 3
assert s._get_axis_number("rows") == 0
assert s._get_axis_name("rows") == "index"
def test_class_axis(self):
# https://github.com/pandas-dev/pandas/issues/18147
# no exception and no empty docstring
assert pydoc.getdoc(Series.index)
Reported by Pylint.
Line: 1
Column: 1
import inspect
import pydoc
import numpy as np
import pytest
from pandas.util._test_decorators import skip_if_no
import pandas as pd
Reported by Pylint.
Line: 19
Column: 1
import pandas._testing as tm
class TestSeriesMisc:
def test_tab_completion(self):
# GH 9910
s = Series(list("abcd"))
# Series of str values should have .str but not .dt/.cat in __dir__
assert "str" in dir(s)
Reported by Pylint.
Line: 20
Column: 5
class TestSeriesMisc:
def test_tab_completion(self):
# GH 9910
s = Series(list("abcd"))
# Series of str values should have .str but not .dt/.cat in __dir__
assert "str" in dir(s)
assert "dt" not in dir(s)
Reported by Pylint.
Line: 22
Column: 9
class TestSeriesMisc:
def test_tab_completion(self):
# GH 9910
s = Series(list("abcd"))
# Series of str values should have .str but not .dt/.cat in __dir__
assert "str" in dir(s)
assert "dt" not in dir(s)
assert "cat" not in dir(s)
Reported by Pylint.
Line: 24
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# GH 9910
s = Series(list("abcd"))
# Series of str values should have .str but not .dt/.cat in __dir__
assert "str" in dir(s)
assert "dt" not in dir(s)
assert "cat" not in dir(s)
# similarly for .dt
s = Series(date_range("1/1/2015", periods=5))
Reported by Bandit.
Line: 25
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
s = Series(list("abcd"))
# Series of str values should have .str but not .dt/.cat in __dir__
assert "str" in dir(s)
assert "dt" not in dir(s)
assert "cat" not in dir(s)
# similarly for .dt
s = Series(date_range("1/1/2015", periods=5))
assert "dt" in dir(s)
Reported by Bandit.
doc/source/conf.py
74 issues
Line: 21
Column: 1
import sys
import jinja2
from numpydoc.docscrape import NumpyDocString
from sphinx.ext.autosummary import _import_by_name
logger = logging.getLogger(__name__)
# https://github.com/sphinx-doc/sphinx/pull/2325/files
Reported by Pylint.
Line: 22
Column: 1
import jinja2
from numpydoc.docscrape import NumpyDocString
from sphinx.ext.autosummary import _import_by_name
logger = logging.getLogger(__name__)
# https://github.com/sphinx-doc/sphinx/pull/2325/files
# Workaround for sphinx-build recursion limit overflow:
Reported by Pylint.
Line: 168
Column: 1
# built documents.
#
# The short X.Y version.
import pandas # isort:skip
# version = '%s r%s' % (pandas.__version__, svn_version())
version = str(pandas.__version__)
# The full version, including alpha/beta/rc tags.
Reported by Pylint.
Line: 463
Column: 1
# Add custom Documenter to handle attributes/methods of an AccessorProperty
# eg pandas.Series.str and pandas.Series.dt (see GH9322)
import sphinx # isort:skip
from sphinx.util import rpartition # isort:skip
from sphinx.ext.autodoc import ( # isort:skip
AttributeDocumenter,
Documenter,
MethodDocumenter,
Reported by Pylint.
Line: 464
Column: 1
# eg pandas.Series.str and pandas.Series.dt (see GH9322)
import sphinx # isort:skip
from sphinx.util import rpartition # isort:skip
from sphinx.ext.autodoc import ( # isort:skip
AttributeDocumenter,
Documenter,
MethodDocumenter,
)
Reported by Pylint.
Line: 465
Column: 1
import sphinx # isort:skip
from sphinx.util import rpartition # isort:skip
from sphinx.ext.autodoc import ( # isort:skip
AttributeDocumenter,
Documenter,
MethodDocumenter,
)
from sphinx.ext.autosummary import Autosummary # isort:skip
Reported by Pylint.
Line: 470
Column: 1
Documenter,
MethodDocumenter,
)
from sphinx.ext.autosummary import Autosummary # isort:skip
class AccessorDocumenter(MethodDocumenter):
"""
Specialized Documenter subclass for accessors.
Reported by Pylint.
Line: 161
Column: 1
# General information about the project.
project = "pandas"
copyright = f"2008-{datetime.now().year}, the pandas development team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
Reported by Pylint.
Line: 592
Column: 26
@staticmethod
def _is_deprecated(real_name):
try:
obj, parent, modname = _import_by_name(real_name)
except ImportError:
return False
doc = NumpyDocString(obj.__doc__ or "")
summary = "".join(doc["Summary"] + doc["Extended Summary"])
return ".. deprecated::" in summary
Reported by Pylint.
Line: 592
Column: 18
@staticmethod
def _is_deprecated(real_name):
try:
obj, parent, modname = _import_by_name(real_name)
except ImportError:
return False
doc = NumpyDocString(obj.__doc__ or "")
summary = "".join(doc["Summary"] + doc["Extended Summary"])
return ".. deprecated::" in summary
Reported by Pylint.
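The conf.py findings at column 1 sit on imports placed mid-file and already marked with # isort:skip, which suggests wrong-import-position, though again the message IDs are not shown in this report. Where the late import is deliberate, as it appears to be in a Sphinx configuration file, the same inline-suppression pattern applies; a hypothetical sketch:

# Hypothetical sketch: assumes the conf.py findings are wrong-import-position;
# the report does not show the actual message IDs.
"""Sphinx configuration (abridged)."""

project = "pandas"

# The late import is deliberate in this configuration file, so the finding is
# suppressed in place rather than by moving the import to the top of the module.
import pandas  # isort:skip  # pylint: disable=wrong-import-position

version = str(pandas.__version__)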
pandas/tests/arrays/interval/test_interval.py
74 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
from pandas import (
Index,
Interval,
Reported by Pylint.
Line: 173
Column: 5
@pyarrow_skip
def test_arrow_extension_type():
import pyarrow as pa
from pandas.core.arrays._arrow_utils import ArrowIntervalType
p1 = ArrowIntervalType(pa.int64(), "left")
p2 = ArrowIntervalType(pa.int64(), "left")
Reported by Pylint.
Line: 190
Column: 5
@pyarrow_skip
def test_arrow_array():
import pyarrow as pa
from pandas.core.arrays._arrow_utils import ArrowIntervalType
intervals = pd.interval_range(1, 5, freq=1).array
Reported by Pylint.
Line: 220
Column: 5
@pyarrow_skip
def test_arrow_array_missing():
import pyarrow as pa
from pandas.core.arrays._arrow_utils import ArrowIntervalType
arr = IntervalArray.from_breaks([0.0, 1.0, 2.0, 3.0])
arr[1] = None
Reported by Pylint.
Line: 255
Column: 5
ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip(breaks):
import pyarrow as pa
from pandas.core.arrays._arrow_utils import ArrowIntervalType
arr = IntervalArray.from_breaks(breaks)
arr[1] = None
Reported by Pylint.
Line: 289
Column: 5
ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip_without_metadata(breaks):
import pyarrow as pa
arr = IntervalArray.from_breaks(breaks)
arr[1] = None
df = pd.DataFrame({"a": arr})
Reported by Pylint.
Line: 104
Column: 27
class TestSetitem:
def test_set_na(self, left_right_dtypes):
left, right = left_right_dtypes
result = IntervalArray.from_arrays(left, right)
if result.dtype.subtype.kind not in ["m", "M"]:
msg = "'value' should be an interval type, got <.*NaTType'> instead."
Reported by Pylint.
Line: 120
Column: 32
result[0] = np.nan
expected_left = Index([left._na_value] + list(left[1:]))
expected_right = Index([right._na_value] + list(right[1:]))
expected = IntervalArray.from_arrays(expected_left, expected_right)
tm.assert_extension_array_equal(result, expected)
Reported by Pylint.
Line: 121
Column: 33
result[0] = np.nan
expected_left = Index([left._na_value] + list(left[1:]))
expected_right = Index([right._na_value] + list(right[1:]))
expected = IntervalArray.from_arrays(expected_left, expected_right)
tm.assert_extension_array_equal(result, expected)
def test_setitem_mismatched_closed(self):
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
from pandas import (
Index,
Interval,
Reported by Pylint.
pandas/tests/frame/methods/test_combine_first.py
74 issues
Line: 4
Column: 1
from datetime import datetime
import numpy as np
import pytest
from pandas.core.dtypes.cast import (
find_common_type,
is_dtype_equal,
)
Reported by Pylint.
Line: 212
Column: 3
)
tm.assert_frame_equal(res, exp)
assert res["a"].dtype == "datetime64[ns]"
# ToDo: this must be int64
assert res["b"].dtype == "int64"
res = dfa.iloc[:0].combine_first(dfb)
exp = DataFrame({"a": [np.nan, np.nan], "b": [4, 5]}, columns=["a", "b"])
tm.assert_frame_equal(res, exp)
Reported by Pylint.
Line: 218
Column: 3
res = dfa.iloc[:0].combine_first(dfb)
exp = DataFrame({"a": [np.nan, np.nan], "b": [4, 5]}, columns=["a", "b"])
tm.assert_frame_equal(res, exp)
# ToDo: this must be datetime64
assert res["a"].dtype == "float64"
# ToDo: this must be int64
assert res["b"].dtype == "int64"
def test_combine_first_timezone(self):
Reported by Pylint.
Line: 220
Column: 3
tm.assert_frame_equal(res, exp)
# ToDo: this must be datetime64
assert res["a"].dtype == "float64"
# ToDo: this must be int64
assert res["b"].dtype == "int64"
def test_combine_first_timezone(self):
# see gh-7630
data1 = pd.to_datetime("20100101 01:01").tz_localize("UTC")
Reported by Pylint.
Line: 1
Column: 1
from datetime import datetime
import numpy as np
import pytest
from pandas.core.dtypes.cast import (
find_common_type,
is_dtype_equal,
)
Reported by Pylint.
Line: 21
Column: 1
import pandas._testing as tm
class TestDataFrameCombineFirst:
def test_combine_first_mixed(self):
a = Series(["a", "b"], index=range(2))
b = Series(range(2), index=range(2))
f = DataFrame({"A": a, "B": b})
Reported by Pylint.
Line: 22
Column: 5
class TestDataFrameCombineFirst:
def test_combine_first_mixed(self):
a = Series(["a", "b"], index=range(2))
b = Series(range(2), index=range(2))
f = DataFrame({"A": a, "B": b})
a = Series(["a", "b"], index=range(5, 7))
Reported by Pylint.
Line: 23
Column: 9
class TestDataFrameCombineFirst:
def test_combine_first_mixed(self):
a = Series(["a", "b"], index=range(2))
b = Series(range(2), index=range(2))
f = DataFrame({"A": a, "B": b})
a = Series(["a", "b"], index=range(5, 7))
b = Series(range(2), index=range(5, 7))
Reported by Pylint.
Line: 24
Column: 9
class TestDataFrameCombineFirst:
def test_combine_first_mixed(self):
a = Series(["a", "b"], index=range(2))
b = Series(range(2), index=range(2))
f = DataFrame({"A": a, "B": b})
a = Series(["a", "b"], index=range(5, 7))
b = Series(range(2), index=range(5, 7))
g = DataFrame({"A": a, "B": b})
Reported by Pylint.