The following issues were found:
pandas/tests/io/formats/test_eng_formatting.py
28 issues
Line: 36
Column: 34
tm.reset_display_options()
def compare(self, formatter, input, output):
formatted_input = formatter(input)
assert formatted_input == output
def compare_all(self, formatter, in_out):
"""
Reported by Pylint.
Line: 51
Column: 13
*number* should be >= 0 because formatter(-number) == fmt is also
tested. *fmt* is derived from *expected_formatting*
"""
for input, output in in_out:
self.compare(formatter, input, output)
self.compare(formatter, -input, "-" + output[1:])
def test_exponents_with_eng_prefix(self):
formatter = fmt.EngFormatter(accuracy=3, use_eng_prefix=True)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
from pandas import DataFrame
import pandas._testing as tm
import pandas.io.formats.format as fmt
class TestEngFormatter:
Reported by Pylint.
Line: 9
Column: 1
import pandas.io.formats.format as fmt
class TestEngFormatter:
def test_eng_float_formatter(self):
df = DataFrame({"A": [1.41, 141.0, 14100, 1410000.0]})
fmt.set_eng_float_format()
result = df.to_string()
Reported by Pylint.
Line: 10
Column: 5
class TestEngFormatter:
def test_eng_float_formatter(self):
df = DataFrame({"A": [1.41, 141.0, 14100, 1410000.0]})
fmt.set_eng_float_format()
result = df.to_string()
expected = (
Reported by Pylint.
Line: 10
Column: 5
class TestEngFormatter:
def test_eng_float_formatter(self):
df = DataFrame({"A": [1.41, 141.0, 14100, 1410000.0]})
fmt.set_eng_float_format()
result = df.to_string()
expected = (
Reported by Pylint.
Line: 11
Column: 9
class TestEngFormatter:
def test_eng_float_formatter(self):
df = DataFrame({"A": [1.41, 141.0, 14100, 1410000.0]})
fmt.set_eng_float_format()
result = df.to_string()
expected = (
" A\n"
Reported by Pylint.
Line: 22
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
"2 14.100E+03\n"
"3 1.410E+06"
)
assert result == expected
fmt.set_eng_float_format(use_eng_prefix=True)
result = df.to_string()
expected = " A\n0 1.410\n1 141.000\n2 14.100k\n3 1.410M"
assert result == expected
Reported by Bandit.
Line: 27
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
fmt.set_eng_float_format(use_eng_prefix=True)
result = df.to_string()
expected = " A\n0 1.410\n1 141.000\n2 14.100k\n3 1.410M"
assert result == expected
fmt.set_eng_float_format(accuracy=0)
result = df.to_string()
expected = " A\n0 1E+00\n1 141E+00\n2 14E+03\n3 1E+06"
assert result == expected
Reported by Bandit.
Line: 32
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
fmt.set_eng_float_format(accuracy=0)
result = df.to_string()
expected = " A\n0 1E+00\n1 141E+00\n2 14E+03\n3 1E+06"
assert result == expected
tm.reset_display_options()
def compare(self, formatter, input, output):
formatted_input = formatter(input)
Reported by Bandit.
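Note on the Bandit B101 findings above: the linked plugin flags plain `assert` statements because CPython removes them when run with `-O`, so a check written that way vanishes in optimized runs. A minimal sketch of the concern, with hypothetical function names (not from pandas):

def set_accuracy(accuracy):
    # Flagged pattern: runtime validation via assert; `python -O` strips it.
    assert accuracy >= 0, "accuracy must be non-negative"
    return accuracy

def set_accuracy_checked(accuracy):
    # Alternative that survives optimization: raise an explicit exception.
    if accuracy < 0:
        raise ValueError("accuracy must be non-negative")
    return accuracy

In pytest modules such as this one, bare asserts are idiomatic, so B101 findings on test paths are usually excluded in the Bandit configuration rather than rewritten.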
pandas/core/arrays/_mixins.py
28 issues
Line: 13
Column: 1
import numpy as np
from pandas._libs import lib
from pandas._libs.arrays import NDArrayBacked
from pandas._typing import (
F,
PositionalIndexer2D,
Shape,
Reported by Pylint.
Line: 14
Column: 1
import numpy as np
from pandas._libs import lib
from pandas._libs.arrays import NDArrayBacked
from pandas._typing import (
F,
PositionalIndexer2D,
Shape,
type_t,
Reported by Pylint.
Line: 14
Column: 1
import numpy as np
from pandas._libs import lib
from pandas._libs.arrays import NDArrayBacked
from pandas._typing import (
F,
PositionalIndexer2D,
Shape,
type_t,
Reported by Pylint.
Line: 60
Column: 17
if self.ndim == 1:
return meth(self, *args, **kwargs)
flags = self._ndarray.flags
flat = self.ravel("K")
result = meth(flat, *args, **kwargs)
order = "F" if flags.f_contiguous else "C"
return result.reshape(self.shape, order=order)
Reported by Pylint.
Line: 88
Column: 5
# ------------------------------------------------------------------------
def take(
self: NDArrayBackedExtensionArrayT,
indices: Sequence[int],
*,
allow_fill: bool = False,
fill_value: Any = None,
Reported by Pylint.
Line: 117
Column: 53
return False
if not is_dtype_equal(self.dtype, other.dtype):
return False
return bool(array_equivalent(self._ndarray, other._ndarray))
def _values_for_argsort(self) -> np.ndarray:
return self._ndarray
# Signature of "argmin" incompatible with supertype "ExtensionArray"
Reported by Pylint.
Line: 123
Column: 5
return self._ndarray
# Signature of "argmin" incompatible with supertype "ExtensionArray"
def argmin(self, axis: int = 0, skipna: bool = True): # type:ignore[override]
# override base class by adding axis keyword
validate_bool_kwarg(skipna, "skipna")
if not skipna and self.isna().any():
raise NotImplementedError
return nargminmax(self, "argmin", axis=axis)
Reported by Pylint.
Line: 131
Column: 5
return nargminmax(self, "argmin", axis=axis)
# Signature of "argmax" incompatible with supertype "ExtensionArray"
def argmax(self, axis: int = 0, skipna: bool = True): # type:ignore[override]
# override base class by adding axis keyword
validate_bool_kwarg(skipna, "skipna")
if not skipna and self.isna().any():
raise NotImplementedError
return nargminmax(self, "argmax", axis=axis)
Reported by Pylint.
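The two entries above sit on the `argmin`/`argmax` definitions, whose own comments note that their signatures differ from the `ExtensionArray` supertype because an `axis` keyword is added. A minimal sketch of that override pattern with hypothetical class names; the exact Pylint message is not shown in the report, but widened overrides like this are a common trigger for signature checks, and they are why the real lines carry `# type: ignore[override]` for mypy:

import numpy as np

class Base:
    def argmin(self, skipna=True):
        raise NotImplementedError

class BackedArray(Base):
    def __init__(self, values):
        self._values = values

    # Inserting `axis` before `skipna` changes the positional order relative
    # to Base.argmin, so type checkers and signature lints can object.
    def argmin(self, axis=0, skipna=True):  # type: ignore[override]
        if not skipna and np.isnan(self._values).any():
            raise NotImplementedError
        return int(self._values.argmin(axis=axis))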
Line: 144
Column: 5
@classmethod
@doc(ExtensionArray._concat_same_type)
def _concat_same_type(
cls: type[NDArrayBackedExtensionArrayT],
to_concat: Sequence[NDArrayBackedExtensionArrayT],
axis: int = 0,
) -> NDArrayBackedExtensionArrayT:
dtypes = {str(x.dtype) for x in to_concat}
Reported by Pylint.
Line: 153
Column: 23
if len(dtypes) != 1:
raise ValueError("to_concat must have the same dtype (tz)", dtypes)
new_values = [x._ndarray for x in to_concat]
new_values = np.concatenate(new_values, axis=axis)
# error: Argument 1 to "_from_backing_data" of "NDArrayBackedExtensionArray" has
# incompatible type "List[ndarray]"; expected "ndarray"
return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type]
Reported by Pylint.
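Several column positions in this file, for example Line 153, Column 23, land on expressions such as `x._ndarray` inside `_concat_same_type`. That is consistent with Pylint's protected-access check, which reports underscore attributes read through any name other than `self` or `cls`, even when the object is an instance of the same class; treating these findings as that check is an assumption, since the report omits message IDs. A minimal sketch of the pattern with hypothetical names:

import numpy as np

class Packed:
    def __init__(self, values):
        self._ndarray = values

    @classmethod
    def _concat_same_type(cls, to_concat):
        # Pylint cannot prove each `x` is a Packed instance, so reading
        # `x._ndarray` here is reported as protected-member access.
        new_values = np.concatenate([x._ndarray for x in to_concat])
        return cls(new_values)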
pandas/tests/extension/decimal/array.py
28 issues
Line: 68
Column: 44
class DecimalArray(OpsMixin, ExtensionScalarOpsMixin, ExtensionArray):
__array_priority__ = 1000
def __init__(self, values, dtype=None, copy=False, context=None):
for i, val in enumerate(values):
if is_float(val) and np.isnan(val):
values[i] = DecimalDtype.na_value
elif not isinstance(val, decimal.Decimal):
raise TypeError("All values must be of type " + str(decimal.Decimal))
Reported by Pylint.
Line: 68
Column: 32
class DecimalArray(OpsMixin, ExtensionScalarOpsMixin, ExtensionArray):
__array_priority__ = 1000
def __init__(self, values, dtype=None, copy=False, context=None):
for i, val in enumerate(values):
if is_float(val) and np.isnan(val):
values[i] = DecimalDtype.na_value
elif not isinstance(val, decimal.Decimal):
raise TypeError("All values must be of type " + str(decimal.Decimal))
Reported by Pylint.
Line: 90
Column: 38
return self._dtype
@classmethod
def _from_sequence(cls, scalars, dtype=None, copy=False):
return cls(scalars)
@classmethod
def _from_sequence_of_strings(cls, strings, dtype=None, copy=False):
return cls._from_sequence([decimal.Decimal(x) for x in strings], dtype, copy)
Reported by Pylint.
Line: 90
Column: 50
return self._dtype
@classmethod
def _from_sequence(cls, scalars, dtype=None, copy=False):
return cls(scalars)
@classmethod
def _from_sequence_of_strings(cls, strings, dtype=None, copy=False):
return cls._from_sequence([decimal.Decimal(x) for x in strings], dtype, copy)
Reported by Pylint.
Line: 90
Column: 5
return self._dtype
@classmethod
def _from_sequence(cls, scalars, dtype=None, copy=False):
return cls(scalars)
@classmethod
def _from_sequence_of_strings(cls, strings, dtype=None, copy=False):
return cls._from_sequence([decimal.Decimal(x) for x in strings], dtype, copy)
Reported by Pylint.
Line: 94
Column: 5
return cls(scalars)
@classmethod
def _from_sequence_of_strings(cls, strings, dtype=None, copy=False):
return cls._from_sequence([decimal.Decimal(x) for x in strings], dtype, copy)
@classmethod
def _from_factorized(cls, values, original):
return cls(values)
Reported by Pylint.
Line: 103
Column: 5
_HANDLED_TYPES = (decimal.Decimal, numbers.Number, np.ndarray)
def to_numpy(
self, dtype=None, copy: bool = False, na_value=no_default, decimals=None
) -> np.ndarray:
result = np.asarray(self, dtype=dtype)
if decimals is not None:
result = np.asarray([round(x, decimals) for x in result])
Reported by Pylint.
Line: 118
Column: 24
):
return NotImplemented
inputs = tuple(x._data if isinstance(x, DecimalArray) else x for x in inputs)
result = getattr(ufunc, method)(*inputs, **kwargs)
def reconstruct(x):
if isinstance(x, (decimal.Decimal, numbers.Number)):
return x
Reported by Pylint.
Line: 140
Column: 5
item = pd.api.indexers.check_array_indexer(self, item)
return type(self)(self._data[item])
def take(self, indexer, allow_fill=False, fill_value=None):
from pandas.api.extensions import take
data = self._data
if allow_fill and fill_value is None:
fill_value = self.dtype.na_value
Reported by Pylint.
Line: 206
Column: 36
@classmethod
def _concat_same_type(cls, to_concat):
return cls(np.concatenate([x._data for x in to_concat]))
def _reduce(self, name: str, *, skipna: bool = True, **kwargs):
if skipna:
# If we don't have any NAs, we can ignore skipna
Reported by Pylint.
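In this file the flagged columns on Lines 68 and 90 fall on the `dtype=None` and `copy=False` parameters of `__init__` and `_from_sequence`, which accept those keywords for interface compatibility but never read them; that shape is consistent with an unused-argument finding (an assumption, as the message text is not shown). A minimal sketch with hypothetical names:

import decimal

class DecimalLikeArray:
    def __init__(self, values, dtype=None, copy=False):
        # `dtype` and `copy` exist to satisfy the constructor protocol that
        # callers expect; they are never used, which is what the lint reports.
        self._data = [decimal.Decimal(v) for v in values]

    @classmethod
    def _from_sequence(cls, scalars, dtype=None, copy=False):
        return cls(scalars)

When an interface dictates the signature, such findings are typically suppressed rather than the parameters removed.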
pandas/tests/series/methods/test_describe.py
28 issues
Line: 1
Column: 1
import numpy as np
from pandas import (
Period,
Series,
Timedelta,
Timestamp,
date_range,
)
Reported by Pylint.
Line: 13
Column: 1
import pandas._testing as tm
class TestSeriesDescribe:
def test_describe_ints(self):
ser = Series([0, 1, 2, 3, 4], name="int_data")
result = ser.describe()
expected = Series(
[5, 2, ser.std(), 0, 1, 2, 3, 4],
Reported by Pylint.
Line: 14
Column: 5
class TestSeriesDescribe:
def test_describe_ints(self):
ser = Series([0, 1, 2, 3, 4], name="int_data")
result = ser.describe()
expected = Series(
[5, 2, ser.std(), 0, 1, 2, 3, 4],
name="int_data",
Reported by Pylint.
Line: 14
Column: 5
class TestSeriesDescribe:
def test_describe_ints(self):
ser = Series([0, 1, 2, 3, 4], name="int_data")
result = ser.describe()
expected = Series(
[5, 2, ser.std(), 0, 1, 2, 3, 4],
name="int_data",
Reported by Pylint.
Line: 24
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_bools(self):
ser = Series([True, True, False, False, False], name="bool_data")
result = ser.describe()
expected = Series(
[5, 2, False, 3], name="bool_data", index=["count", "unique", "top", "freq"]
)
Reported by Pylint.
Line: 24
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_bools(self):
ser = Series([True, True, False, False, False], name="bool_data")
result = ser.describe()
expected = Series(
[5, 2, False, 3], name="bool_data", index=["count", "unique", "top", "freq"]
)
Reported by Pylint.
Line: 32
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_strs(self):
ser = Series(["a", "a", "b", "c", "d"], name="str_data")
result = ser.describe()
expected = Series(
[5, 4, "a", 2], name="str_data", index=["count", "unique", "top", "freq"]
Reported by Pylint.
Line: 32
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_strs(self):
ser = Series(["a", "a", "b", "c", "d"], name="str_data")
result = ser.describe()
expected = Series(
[5, 4, "a", 2], name="str_data", index=["count", "unique", "top", "freq"]
Reported by Pylint.
Line: 41
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_timedelta64(self):
ser = Series(
[
Timedelta("1 days"),
Timedelta("2 days"),
Timedelta("3 days"),
Reported by Pylint.
Line: 41
Column: 5
)
tm.assert_series_equal(result, expected)
def test_describe_timedelta64(self):
ser = Series(
[
Timedelta("1 days"),
Timedelta("2 days"),
Timedelta("3 days"),
Reported by Pylint.
pandas/tests/io/parser/test_encoding.py
28 issues
Line: 11
Column: 1
import tempfile
import numpy as np
import pytest
from pandas import (
DataFrame,
read_csv,
)
Reported by Pylint.
Line: 147
Column: 18
],
)
def test_binary_mode_file_buffers(
all_parsers, csv_dir_path, file_path, encoding, datapath
):
# gh-23779: Python csv engine shouldn't error on files opened in binary.
# gh-31575: Python csv engine shouldn't error on files opened in raw binary.
parser = all_parsers
Reported by Pylint.
Line: 20
Column: 1
import pandas._testing as tm
def test_bytes_io_input(all_parsers):
encoding = "cp1255"
parser = all_parsers
data = BytesIO("שלום:1234\n562:123".encode(encoding))
result = parser.read_csv(data, sep=":", encoding=encoding)
Reported by Pylint.
Line: 31
Column: 1
tm.assert_frame_equal(result, expected)
def test_read_csv_unicode(all_parsers):
parser = all_parsers
data = BytesIO("\u0141aski, Jan;1".encode())
result = parser.read_csv(data, sep=";", encoding="utf-8", header=None)
expected = DataFrame([["\u0141aski, Jan", 1]])
Reported by Pylint.
Line: 42
Column: 1
@pytest.mark.parametrize("sep", [",", "\t"])
@pytest.mark.parametrize("encoding", ["utf-16", "utf-16le", "utf-16be"])
def test_utf16_bom_skiprows(all_parsers, sep, encoding):
# see gh-2298
parser = all_parsers
data = """skip this
skip this too
A,B,C
Reported by Pylint.
Line: 57
Column: 9
utf8 = "utf-8"
with tm.ensure_clean(path) as path:
from io import TextIOWrapper
bytes_data = data.encode(encoding)
with open(path, "wb") as f:
f.write(bytes_data)
Reported by Pylint.
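The Line 57, Column 9 entry points at `from io import TextIOWrapper` inside the `with` block, a position consistent with Pylint reporting an import that is not at module top level (assumed; the message is omitted). A minimal sketch of the two placements, with hypothetical function names:

from io import BytesIO, TextIOWrapper  # preferred: import at module level

def wrap_utf8(raw):
    return TextIOWrapper(BytesIO(raw), encoding="utf-8")

def wrap_utf8_late(raw):
    # Pattern the report points at: the import lives inside the function body.
    from io import TextIOWrapper as _TextIOWrapper
    return _TextIOWrapper(BytesIO(raw), encoding="utf-8")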
Line: 61
Column: 34
bytes_data = data.encode(encoding)
with open(path, "wb") as f:
f.write(bytes_data)
bytes_buffer = BytesIO(data.encode(utf8))
bytes_buffer = TextIOWrapper(bytes_buffer, encoding=utf8)
Reported by Pylint.
Line: 74
Column: 1
tm.assert_frame_equal(result, expected)
def test_utf16_example(all_parsers, csv_dir_path):
path = os.path.join(csv_dir_path, "utf16_ex.txt")
parser = all_parsers
result = parser.read_csv(path, encoding="utf-16", sep="\t")
assert len(result) == 50
Reported by Pylint.
Line: 78
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
path = os.path.join(csv_dir_path, "utf16_ex.txt")
parser = all_parsers
result = parser.read_csv(path, encoding="utf-16", sep="\t")
assert len(result) == 50
def test_unicode_encoding(all_parsers, csv_dir_path):
path = os.path.join(csv_dir_path, "unicode_series.csv")
parser = all_parsers
Reported by Bandit.
Line: 81
Column: 1
assert len(result) == 50
def test_unicode_encoding(all_parsers, csv_dir_path):
path = os.path.join(csv_dir_path, "unicode_series.csv")
parser = all_parsers
result = parser.read_csv(path, header=None, encoding="latin-1")
result = result.set_index(0)
Reported by Pylint.
pandas/tests/arrays/masked/test_arrow_compat.py
28 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
pa = pytest.importorskip("pyarrow", minversion="0.17.0")
Reported by Pylint.
Line: 23
Column: 22
return request.param
def test_arrow_array(data):
arr = pa.array(data)
expected = pa.array(
data.to_numpy(object, na_value=None),
type=pa.from_numpy_dtype(data.dtype.numpy_dtype),
)
Reported by Pylint.
Line: 33
Column: 26
@td.skip_if_no("pyarrow")
def test_arrow_roundtrip(data):
df = pd.DataFrame({"a": data})
table = pa.table(df)
assert table.field("a").type == str(data.dtype.numpy_dtype)
result = table.to_pandas()
assert result["a"].dtype == data.dtype
Reported by Pylint.
Line: 43
Column: 38
@td.skip_if_no("pyarrow")
def test_arrow_load_from_zero_chunks(data):
# GH-41040
df = pd.DataFrame({"a": data[0:0]})
table = pa.table(df)
assert table.field("a").type == str(data.dtype.numpy_dtype)
Reported by Pylint.
Line: 70
Column: 23
@td.skip_if_no("pyarrow")
def test_arrow_sliced(data):
# https://github.com/pandas-dev/pandas/issues/38525
df = pd.DataFrame({"a": data})
table = pa.table(df)
result = table.slice(2, None).to_pandas()
Reported by Pylint.
Line: 101
Column: 42
return np_dtype, pa_array, np_expected, mask_expected
def test_pyarrow_array_to_numpy_and_mask(np_dtype_to_arrays):
"""
Test conversion from pyarrow array to numpy array.
Modifies the pyarrow buffer to contain padding and offset, which are
considered valid buffers by pyarrow.
Reported by Pylint.
Line: 112
Column: 5
See https://github.com/pandas-dev/pandas/issues/40896
"""
np_dtype, pa_array, np_expected, mask_expected = np_dtype_to_arrays
data, mask = pyarrow_array_to_numpy_and_mask(pa_array, np_dtype)
tm.assert_numpy_array_equal(data[:3], np_expected)
tm.assert_numpy_array_equal(mask, mask_expected)
mask_buffer = pa_array.buffers()[0]
data_buffer = pa_array.buffers()[1]
Reported by Pylint.
Line: 165
Column: 41
@td.skip_if_no("pyarrow")
def test_from_arrow_type_error(request, data):
# ensure that __from_arrow__ returns a TypeError when getting a wrong
# array type
if data.dtype != "boolean":
# TODO numeric dtypes cast any incoming array to the correct dtype
# instead of erroring
Reported by Pylint.
Line: 169
Column: 3
# ensure that __from_arrow__ returns a TypeError when getting a wrong
# array type
if data.dtype != "boolean":
# TODO numeric dtypes cast any incoming array to the correct dtype
# instead of erroring
request.node.add_marker(
pytest.mark.xfail(raises=None, reason="numeric dtypes don't error but cast")
)
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
pa = pytest.importorskip("pyarrow", minversion="0.17.0")
Reported by Pylint.
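The flagged columns in this file fall on the `data` parameter of the test functions, while the snippets also show a module-level `data` fixture (`return request.param`). Pytest injects fixtures by matching argument names, so the parameter shadows the fixture function of the same name, a classic trigger for Pylint's redefined-outer-name check; reading the findings that way is an assumption. A minimal sketch of the arrangement:

import pytest

@pytest.fixture(params=[[1, 2, 3], [4, 5]])
def data(request):
    # Module-level name `data` ...
    return request.param

def test_nonempty(data):
    # ... is shadowed by this argument, which is how pytest passes the
    # fixture in; to Pylint it looks like redefining the outer name.
    assert len(data) > 0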
pandas/tests/arrays/categorical/test_take.py
28 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
Categorical,
Index,
)
import pandas._testing as tm
Reported by Pylint.
Line: 95
Column: 13
ci = Index(cat)
with tm.assert_produces_warning(FutureWarning):
ci.take_nd([0, 1])
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
Categorical,
Index,
)
import pandas._testing as tm
Reported by Pylint.
Line: 11
Column: 1
import pandas._testing as tm
class TestTake:
# https://github.com/pandas-dev/pandas/issues/20664
def test_take_default_allow_fill(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
Reported by Pylint.
Line: 14
Column: 5
class TestTake:
# https://github.com/pandas-dev/pandas/issues/20664
def test_take_default_allow_fill(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
result = cat.take([0, -1])
assert result.equals(cat)
Reported by Pylint.
Line: 14
Column: 5
class TestTake:
# https://github.com/pandas-dev/pandas/issues/20664
def test_take_default_allow_fill(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
result = cat.take([0, -1])
assert result.equals(cat)
Reported by Pylint.
Line: 19
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
with tm.assert_produces_warning(None):
result = cat.take([0, -1])
assert result.equals(cat)
def test_take_positive_no_warning(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
cat.take([0, 0])
Reported by Bandit.
Line: 21
Column: 5
assert result.equals(cat)
def test_take_positive_no_warning(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
cat.take([0, 0])
def test_take_bounds(self, allow_fill):
Reported by Pylint.
Line: 21
Column: 5
assert result.equals(cat)
def test_take_positive_no_warning(self):
cat = Categorical(["a", "b"])
with tm.assert_produces_warning(None):
cat.take([0, 0])
def test_take_bounds(self, allow_fill):
Reported by Pylint.
Line: 26
Column: 5
with tm.assert_produces_warning(None):
cat.take([0, 0])
def test_take_bounds(self, allow_fill):
# https://github.com/pandas-dev/pandas/issues/20664
cat = Categorical(["a", "b", "a"])
if allow_fill:
msg = "indices are out-of-bounds"
else:
Reported by Pylint.
pandas/tests/tseries/offsets/test_dst.py
28 issues
Line: 6
Column: 1
"""
from datetime import timedelta
import pytest
import pytz
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BMonthBegin,
Reported by Pylint.
Line: 7
Column: 1
from datetime import timedelta
import pytest
import pytz
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BMonthBegin,
BMonthEnd,
Reported by Pylint.
Line: 10
Column: 1
import pytz
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BMonthBegin,
BMonthEnd,
BQuarterBegin,
BQuarterEnd,
BYearBegin,
Reported by Pylint.
Line: 10
Column: 1
import pytz
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BMonthBegin,
BMonthEnd,
BQuarterBegin,
BQuarterEnd,
BYearBegin,
Reported by Pylint.
Line: 225
Column: 9
msg = f"Cannot infer dst time from {target_dt}, try using the 'ambiguous' argument"
with pytest.raises(pytz.AmbiguousTimeError, match=msg):
localized_dt + offset
Reported by Pylint.
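The Line 225 entry points at the bare expression `localized_dt + offset` inside the `pytest.raises` block. The expression is evaluated only so that it raises, but a statement whose result is discarded looks like a mistake to a linter, which is consistent with a pointless-statement style finding (assumed; the message is not shown). A minimal sketch with hypothetical objects:

import pytest

class Brittle:
    def __add__(self, other):
        raise ValueError("cannot add")

def test_add_raises():
    with pytest.raises(ValueError, match="cannot add"):
        Brittle() + 1  # bare expression: only the exception it raises matters

def test_add_raises_bound():
    with pytest.raises(ValueError, match="cannot add"):
        _ = Brittle() + 1  # binding the result is a common way to quiet the lint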
Line: 36
Column: 1
from pandas.tests.tseries.offsets.test_offsets import get_utc_offset_hours
class TestDST:
# one microsecond before the DST transition
ts_pre_fallback = "2013-11-03 01:59:59.999999"
ts_pre_springfwd = "2013-03-10 01:59:59.999999"
Reported by Pylint.
Line: 65
Column: 5
"microseconds",
]
def _test_all_offsets(self, n, **kwds):
valid_offsets = (
self.valid_date_offsets_plural
if n > 1
else self.valid_date_offsets_singular
)
Reported by Pylint.
Line: 78
Column: 9
def _test_offset(self, offset_name, offset_n, tstart, expected_utc_offset):
offset = DateOffset(**{offset_name: offset_n})
t = tstart + offset
if expected_utc_offset is not None:
assert get_utc_offset_hours(t) == expected_utc_offset
if offset_name == "weeks":
# dates should match
Reported by Pylint.
Line: 80
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
t = tstart + offset
if expected_utc_offset is not None:
assert get_utc_offset_hours(t) == expected_utc_offset
if offset_name == "weeks":
# dates should match
assert t.date() == timedelta(days=7 * offset.kwds["weeks"]) + tstart.date()
# expect the same day of week, hour of day, minute, second, ...
Reported by Bandit.
Line: 84
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
if offset_name == "weeks":
# dates should match
assert t.date() == timedelta(days=7 * offset.kwds["weeks"]) + tstart.date()
# expect the same day of week, hour of day, minute, second, ...
assert (
t.dayofweek == tstart.dayofweek
and t.hour == tstart.hour
and t.minute == tstart.minute
Reported by Bandit.
pandas/tests/arrays/boolean/test_comparison.py
28 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
from pandas.arrays import BooleanArray
from pandas.tests.extension.base import BaseOpsUtil
Reported by Pylint.
Line: 19
Column: 30
class TestComparisonOps(BaseOpsUtil):
def _compare_other(self, data, op_name, other):
op = self.get_op_from_name(op_name)
# array
result = pd.Series(op(data, other))
expected = pd.Series(op(data._data, other), dtype="boolean")
Reported by Pylint.
Line: 24
Column: 33
# array
result = pd.Series(op(data, other))
expected = pd.Series(op(data._data, other), dtype="boolean")
# propagate NAs
expected[data._mask] = pd.NA
tm.assert_series_equal(result, expected)
Reported by Pylint.
Line: 26
Column: 18
result = pd.Series(op(data, other))
expected = pd.Series(op(data._data, other), dtype="boolean")
# propagate NAs
expected[data._mask] = pd.NA
tm.assert_series_equal(result, expected)
# series
s = pd.Series(data)
Reported by Pylint.
Line: 34
Column: 30
s = pd.Series(data)
result = op(s, other)
expected = pd.Series(data._data)
expected = op(expected, other)
expected = expected.astype("boolean")
# propagate NAs
expected[data._mask] = pd.NA
Reported by Pylint.
Line: 38
Column: 18
expected = op(expected, other)
expected = expected.astype("boolean")
# propagate NAs
expected[data._mask] = pd.NA
tm.assert_series_equal(result, expected)
def test_compare_scalar(self, data, all_compare_operators):
op_name = all_compare_operators
Reported by Pylint.
Line: 42
Column: 35
tm.assert_series_equal(result, expected)
def test_compare_scalar(self, data, all_compare_operators):
op_name = all_compare_operators
self._compare_other(data, op_name, True)
def test_compare_array(self, data, all_compare_operators):
op_name = all_compare_operators
Reported by Pylint.
Line: 46
Column: 34
op_name = all_compare_operators
self._compare_other(data, op_name, True)
def test_compare_array(self, data, all_compare_operators):
op_name = all_compare_operators
other = pd.array([True] * len(data), dtype="boolean")
self._compare_other(data, op_name, other)
other = np.array([True] * len(data))
self._compare_other(data, op_name, other)
Reported by Pylint.
Line: 65
Column: 25
if other is pd.NA:
expected = pd.array([None, None, None], dtype="boolean")
else:
values = op(a._data, other)
expected = BooleanArray(values, a._mask, copy=True)
tm.assert_extension_array_equal(result, expected)
# ensure we haven't mutated anything inplace
result[0] = None
Reported by Pylint.
Line: 66
Column: 45
expected = pd.array([None, None, None], dtype="boolean")
else:
values = op(a._data, other)
expected = BooleanArray(values, a._mask, copy=True)
tm.assert_extension_array_equal(result, expected)
# ensure we haven't mutated anything inplace
result[0] = None
tm.assert_extension_array_equal(
Reported by Pylint.
pandas/tests/indexes/interval/test_setops.py
28 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
Index,
IntervalIndex,
Timestamp,
interval_range,
)
Reported by Pylint.
Line: 178
Column: 3
index = monotonic_index(0, 11, closed=closed)
set_op = getattr(index, op_name)
# TODO: standardize return type of non-union setops type(self vs other)
# non-IntervalIndex
if op_name == "difference":
expected = index
else:
expected = getattr(index.astype("O"), op_name)(Index([1, 2, 3]))
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
Index,
IntervalIndex,
Timestamp,
interval_range,
)
Reported by Pylint.
Line: 13
Column: 1
import pandas._testing as tm
def monotonic_index(start, end, dtype="int64", closed="right"):
return IntervalIndex.from_breaks(np.arange(start, end, dtype=dtype), closed=closed)
def empty_index(dtype="int64", closed="right"):
return IntervalIndex(np.array([], dtype=dtype), closed=closed)
Reported by Pylint.
Line: 17
Column: 1
return IntervalIndex.from_breaks(np.arange(start, end, dtype=dtype), closed=closed)
def empty_index(dtype="int64", closed="right"):
return IntervalIndex(np.array([], dtype=dtype), closed=closed)
class TestIntervalIndex:
def test_union(self, closed, sort):
Reported by Pylint.
Line: 21
Column: 1
return IntervalIndex(np.array([], dtype=dtype), closed=closed)
class TestIntervalIndex:
def test_union(self, closed, sort):
index = monotonic_index(0, 11, closed=closed)
other = monotonic_index(5, 13, closed=closed)
expected = monotonic_index(0, 13, closed=closed)
Reported by Pylint.
Line: 22
Column: 5
class TestIntervalIndex:
def test_union(self, closed, sort):
index = monotonic_index(0, 11, closed=closed)
other = monotonic_index(5, 13, closed=closed)
expected = monotonic_index(0, 13, closed=closed)
result = index[::-1].union(other, sort=sort)
Reported by Pylint.
Line: 22
Column: 5
class TestIntervalIndex:
def test_union(self, closed, sort):
index = monotonic_index(0, 11, closed=closed)
other = monotonic_index(5, 13, closed=closed)
expected = monotonic_index(0, 13, closed=closed)
result = index[::-1].union(other, sort=sort)
Reported by Pylint.
Line: 30
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
result = index[::-1].union(other, sort=sort)
if sort is None:
tm.assert_index_equal(result, expected)
assert tm.equalContents(result, expected)
result = other[::-1].union(index, sort=sort)
if sort is None:
tm.assert_index_equal(result, expected)
assert tm.equalContents(result, expected)
Reported by Bandit.
Line: 35
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
result = other[::-1].union(index, sort=sort)
if sort is None:
tm.assert_index_equal(result, expected)
assert tm.equalContents(result, expected)
tm.assert_index_equal(index.union(index, sort=sort), index)
tm.assert_index_equal(index.union(index[:1], sort=sort), index)
def test_union_empty_result(self, closed, sort):
Reported by Bandit.