The following issues were found:
pandas/tests/series/methods/test_tz_localize.py
18 issues
Line: 1
Column: 1
import pytest
import pytz
from pandas._libs.tslibs import timezones
from pandas import (
    DatetimeIndex,
    NaT,
    Series,
Reported by Pylint.
Line: 2
Column: 1
import pytest
import pytz
from pandas._libs.tslibs import timezones
from pandas import (
    DatetimeIndex,
    NaT,
    Series,
Reported by Pylint.
Line: 4
Column: 1
import pytest
import pytz
from pandas._libs.tslibs import timezones
from pandas import (
    DatetimeIndex,
    NaT,
    Series,
Reported by Pylint.
Line: 64
Column: 17
                s.tz_localize(tz, nonexistent=method)
        elif exp == "invalid":
            with pytest.raises(ValueError, match="argument must be one of"):
                dti.tz_localize(tz, nonexistent=method)
        else:
            result = s.tz_localize(tz, nonexistent=method)
            expected = Series(1, index=DatetimeIndex([exp] * n, tz=tz))
            tm.assert_series_equal(result, expected)
Reported by Pylint.
Line: 64
Column: 17
                s.tz_localize(tz, nonexistent=method)
        elif exp == "invalid":
            with pytest.raises(ValueError, match="argument must be one of"):
                dti.tz_localize(tz, nonexistent=method)
        else:
            result = s.tz_localize(tz, nonexistent=method)
            expected = Series(1, index=DatetimeIndex([exp] * n, tz=tz))
            tm.assert_series_equal(result, expected)
Reported by Pylint.
Line: 1
Column: 1
import pytest
import pytz
from pandas._libs.tslibs import timezones
from pandas import (
    DatetimeIndex,
    NaT,
    Series,
Reported by Pylint.
Line: 16
Column: 1
import pandas._testing as tm
class TestTZLocalize:
    def test_series_tz_localize_ambiguous_bool(self):
        # make sure that we are correctly accepting bool values as ambiguous
        # GH#14402
        ts = Timestamp("2015-11-01 01:00:03")
Reported by Pylint.
Line: 17
Column: 5
class TestTZLocalize:
    def test_series_tz_localize_ambiguous_bool(self):
        # make sure that we are correctly accepting bool values as ambiguous
        # GH#14402
        ts = Timestamp("2015-11-01 01:00:03")
        expected0 = Timestamp("2015-11-01 01:00:03-0500", tz="US/Central")
Reported by Pylint.
Line: 17
Column: 5
class TestTZLocalize:
    def test_series_tz_localize_ambiguous_bool(self):
        # make sure that we are correctly accepting bool values as ambiguous
        # GH#14402
        ts = Timestamp("2015-11-01 01:00:03")
        expected0 = Timestamp("2015-11-01 01:00:03-0500", tz="US/Central")
Reported by Pylint.
Line: 21
Column: 9
# make sure that we are correctly accepting bool values as ambiguous
# GH#14402
ts = Timestamp("2015-11-01 01:00:03")
expected0 = Timestamp("2015-11-01 01:00:03-0500", tz="US/Central")
expected1 = Timestamp("2015-11-01 01:00:03-0600", tz="US/Central")
ser = Series([ts])
expected0 = Series([expected0])
Reported by Pylint.
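Note: the Line 1 / Column 1 findings above repeat the same import excerpt but omit the Pylint message text; findings at that position are usually module-level checks such as a missing module docstring or import ordering. A minimal sketch of the module header with a docstring added, assuming missing-module-docstring is among the reported rules (the docstring wording is illustrative, not taken from the source):
"""Tests for Series.tz_localize."""
import pytest
import pytz

from pandas._libs.tslibs import timezones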
pandas/tests/indexes/multi/test_missing.py
18 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import MultiIndex
import pandas._testing as tm
def test_fillna(idx):
Reported by Pylint.
Line: 72
Column: 33
# cases in indices doesn't include NaN
expected = np.array([False] * len(index), dtype=bool)
tm.assert_numpy_array_equal(index._isnan, expected)
assert index.hasnans is False
index = idx.copy()
values = index.values
values[1] = np.nan
Reported by Pylint.
Line: 83
Column: 33
expected = np.array([False] * len(index), dtype=bool)
expected[1] = True
tm.assert_numpy_array_equal(index._isnan, expected)
assert index.hasnans is True
def test_nan_stays_float():
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas as pd
from pandas import MultiIndex
import pandas._testing as tm
def test_fillna(idx):
Reported by Pylint.
Line: 9
Column: 1
import pandas._testing as tm
def test_fillna(idx):
    # GH 11343
    msg = "isna is not defined for MultiIndex"
    with pytest.raises(NotImplementedError, match=msg):
        idx.fillna(idx[0])
Reported by Pylint.
Line: 16
Column: 1
        idx.fillna(idx[0])
def test_dropna():
    # GH 6194
    idx = MultiIndex.from_arrays(
        [
            [1, np.nan, 3, np.nan, 5],
            [1, 2, np.nan, np.nan, 5],
Reported by Pylint.
Line: 56
Column: 1
    tm.assert_index_equal(idx.dropna(how="all"), expected)
def test_nulls(idx):
    # this is really a smoke test for the methods
    # as these are adequately tested for function elsewhere
    msg = "isna is not defined for MultiIndex"
    with pytest.raises(NotImplementedError, match=msg):
Reported by Pylint.
Line: 66
Column: 1
@pytest.mark.xfail(reason="isna is not defined for MultiIndex")
def test_hasnans_isnans(idx):
    # GH 11343, added tests for hasnans / isnans
    index = idx.copy()
    # cases in indices doesn't include NaN
    expected = np.array([False] * len(index), dtype=bool)
Reported by Pylint.
Line: 73
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
# cases in indices doesn't include NaN
expected = np.array([False] * len(index), dtype=bool)
tm.assert_numpy_array_equal(index._isnan, expected)
assert index.hasnans is False
index = idx.copy()
values = index.values
values[1] = np.nan
Reported by Bandit.
Line: 84
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    expected = np.array([False] * len(index), dtype=bool)
    expected[1] = True
    tm.assert_numpy_array_equal(index._isnan, expected)
    assert index.hasnans is True
def test_nan_stays_float():
    # GH 7031
Reported by Bandit.
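The two Bandit findings above are B101 (assert_used; see the linked plugin page). Bare assert statements are idiomatic in pytest test code, so projects typically exclude test paths from the Bandit scan or skip B101 there rather than rewriting the assertions; in library code the usual remedy is an explicit exception. A minimal sketch of that remedy with a hypothetical helper, not taken from pandas:
def require_positive(n: int) -> int:
    # Explicit check instead of `assert`: it survives `python -O` and does
    # not trigger Bandit B101.
    if n <= 0:
        raise ValueError(f"expected a positive value, got {n}")
    return n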
pandas/tests/tseries/holiday/test_calendar.py
18 issues
Line: 3
Column: 1
from datetime import datetime
import pytest
from pandas import (
    DatetimeIndex,
    offsets,
    to_datetime,
)
Reported by Pylint.
Line: 24
Column: 66
@pytest.mark.parametrize(
    "transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
)
def test_calendar(transform):
    start_date = datetime(2012, 1, 1)
    end_date = datetime(2012, 12, 31)
Reported by Pylint.
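Column 66 in the parametrize call above points at `lambda x: Timestamp(x)`. If this finding is Pylint's unnecessary-lambda check (the message text is not included in this report), the callable can be passed directly, for example:
import pytest

from pandas import Timestamp


@pytest.mark.parametrize(
    "transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), Timestamp]
)
def test_calendar(transform):
    ...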
Line: 1
Column: 1
from datetime import datetime
import pytest
from pandas import (
    DatetimeIndex,
    offsets,
    to_datetime,
)
Reported by Pylint.
Line: 25
Column: 1
@pytest.mark.parametrize(
    "transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
)
def test_calendar(transform):
    start_date = datetime(2012, 1, 1)
    end_date = datetime(2012, 12, 31)
    calendar = USFederalHolidayCalendar()
Reported by Pylint.
Line: 46
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
        datetime(2012, 12, 25),
    ]
    assert list(holidays.to_pydatetime()) == expected
def test_calendar_caching():
    # see gh-9552.
Reported by Bandit.
Line: 49
Column: 1
    assert list(holidays.to_pydatetime()) == expected
def test_calendar_caching():
    # see gh-9552.
    class TestCalendar(AbstractHolidayCalendar):
        def __init__(self, name=None, rules=None):
            super().__init__(name=name, rules=rules)
Reported by Pylint.
Line: 52
Column: 5
def test_calendar_caching():
    # see gh-9552.
    class TestCalendar(AbstractHolidayCalendar):
        def __init__(self, name=None, rules=None):
            super().__init__(name=name, rules=rules)
    jan1 = TestCalendar(rules=[Holiday("jan1", year=2015, month=1, day=1)])
    jan2 = TestCalendar(rules=[Holiday("jan2", year=2015, month=1, day=2)])
Reported by Pylint.
Line: 64
Column: 1
    tm.assert_index_equal(jan2.holidays(), DatetimeIndex(["02-Jan-2015"]))
def test_calendar_observance_dates():
    # see gh-11477
    us_fed_cal = get_calendar("USFederalHolidayCalendar")
    holidays0 = us_fed_cal.holidays(
        datetime(2015, 7, 3), datetime(2015, 7, 3)
    )  # <-- same start and end dates
Reported by Pylint.
Line: 86
Column: 1
    tm.assert_index_equal(holidays0, holidays2)
def test_rule_from_name():
    us_fed_cal = get_calendar("USFederalHolidayCalendar")
    assert us_fed_cal.rule_from_name("Thanksgiving") == USThanksgivingDay
def test_calendar_2031():
Reported by Pylint.
Line: 88
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_rule_from_name():
    us_fed_cal = get_calendar("USFederalHolidayCalendar")
    assert us_fed_cal.rule_from_name("Thanksgiving") == USThanksgivingDay
def test_calendar_2031():
    # See gh-27790
    #
Reported by Bandit.
pandas/tests/series/methods/test_compare.py
18 issues
Line: 2
Column: 1
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
@pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
def test_compare_axis(align_axis):
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
@pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
def test_compare_axis(align_axis):
Reported by Pylint.
Line: 9
Column: 1
@pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
def test_compare_axis(align_axis):
    # GH#30429
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    result = s1.compare(s2, align_axis=align_axis)
Reported by Pylint.
Line: 11
Column: 5
@pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
def test_compare_axis(align_axis):
    # GH#30429
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    result = s1.compare(s2, align_axis=align_axis)
    if align_axis in (1, "columns"):
Reported by Pylint.
Line: 12
Column: 5
def test_compare_axis(align_axis):
    # GH#30429
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    result = s1.compare(s2, align_axis=align_axis)
    if align_axis in (1, "columns"):
        indices = pd.Index([0, 2])
Reported by Pylint.
Line: 35
Column: 1
        (True, False),
        (False, True),
        (True, True),
        # False, False case is already covered in test_compare_axis
    ],
)
def test_compare_various_formats(keep_shape, keep_equal):
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
Reported by Pylint.
Line: 39
Column: 5
    ],
)
def test_compare_various_formats(keep_shape, keep_equal):
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    result = s1.compare(s2, keep_shape=keep_shape, keep_equal=keep_equal)
    if keep_shape:
Reported by Pylint.
Line: 40
Column: 5
)
def test_compare_various_formats(keep_shape, keep_equal):
    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    result = s1.compare(s2, keep_shape=keep_shape, keep_equal=keep_equal)
    if keep_shape:
        indices = pd.Index([0, 1, 2])
Reported by Pylint.
Line: 66
Column: 1
    tm.assert_frame_equal(result, expected)
def test_compare_with_equal_nulls():
    # We want to make sure two NaNs are considered the same
    # and dropped where applicable
    s1 = pd.Series(["a", "b", np.nan])
    s2 = pd.Series(["x", "b", np.nan])
Reported by Pylint.
Line: 69
Column: 5
def test_compare_with_equal_nulls():
    # We want to make sure two NaNs are considered the same
    # and dropped where applicable
    s1 = pd.Series(["a", "b", np.nan])
    s2 = pd.Series(["x", "b", np.nan])
    result = s1.compare(s2)
    expected = pd.DataFrame([["a", "x"]], columns=["self", "other"])
    tm.assert_frame_equal(result, expected)
Reported by Pylint.
pandas/tests/io/parser/dtypes/test_categorical.py
18 issues
Line: 9
Column: 1
import os
import numpy as np
import pytest
from pandas.core.dtypes.dtypes import CategoricalDtype
import pandas as pd
from pandas import (
Reported by Pylint.
Line: 28
Column: 1
        "category",
        CategoricalDtype(),
        {"a": "category", "b": "category", "c": CategoricalDtype()},
    ],
)
def test_categorical_dtype(all_parsers, dtype):
    # see gh-10153
    parser = all_parsers
    data = """a,b,c
Reported by Pylint.
Line: 49
Column: 1
@pytest.mark.parametrize("dtype", [{"b": "category"}, {1: "category"}])
def test_categorical_dtype_single(all_parsers, dtype):
    # see gh-10153
    parser = all_parsers
    data = """a,b,c
1,a,3.4
1,a,3.4
Reported by Pylint.
Line: 63
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_unsorted(all_parsers):
    # see gh-10153
    parser = all_parsers
    data = """a,b,c
1,b,3.4
1,b,3.4
Reported by Pylint.
Line: 81
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_missing(all_parsers):
    # see gh-10153
    parser = all_parsers
    data = """a,b,c
1,b,3.4
1,nan,3.4
Reported by Pylint.
Line: 100
Column: 1
@pytest.mark.slow
def test_categorical_dtype_high_cardinality_numeric(all_parsers):
    # see gh-18186
    parser = all_parsers
    data = np.sort([str(i) for i in range(524289)])
    expected = DataFrame({"a": Categorical(data, ordered=True)})
Reported by Pylint.
Line: 113
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_utf16(all_parsers, csv_dir_path):
    # see gh-10153
    pth = os.path.join(csv_dir_path, "utf16_ex.txt")
    parser = all_parsers
    encoding = "utf-16"
    sep = "\t"
Reported by Pylint.
Line: 127
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_chunksize_infer_categories(all_parsers):
    # see gh-10153
    parser = all_parsers
    data = """a,b
1,a
1,b
Reported by Pylint.
Line: 146
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_chunksize_explicit_categories(all_parsers):
    # see gh-10153
    parser = all_parsers
    data = """a,b
1,a
1,b
Reported by Pylint.
Line: 168
Column: 1
    tm.assert_frame_equal(actual, expected)
def test_categorical_dtype_latin1(all_parsers, csv_dir_path):
    # see gh-10153
    pth = os.path.join(csv_dir_path, "unicode_series.csv")
    parser = all_parsers
    encoding = "latin-1"
Reported by Pylint.
pandas/core/dtypes/concat.py
18 issues
Line: 134
Column: 20
        if isinstance(to_concat[0], ExtensionArray):
            cls = type(to_concat[0])
            return cls._concat_same_type(to_concat)
        else:
            return np.concatenate(to_concat)
    elif all_empty:
        # we have all empties, but may need to coerce the result dtype to
Reported by Pylint.
Line: 263
Column: 20
def _maybe_unwrap(x):
    if isinstance(x, (ABCCategoricalIndex, ABCSeries)):
        return x._values
    elif isinstance(x, Categorical):
        return x
    else:
        raise TypeError("all components to combine must be Categorical")
Reported by Pylint.
Line: 279
Column: 12
        raise TypeError("dtype of categories must be the same")
    ordered = False
    if all(first._categories_match_up_to_permutation(other) for other in to_union[1:]):
        # identical categories - fastpath
        categories = first.categories
        ordered = first.ordered
        all_codes = [first._encode_with_my_categories(x)._codes for x in to_union]
Reported by Pylint.
Line: 284
Column: 22
categories = first.categories
ordered = first.ordered
all_codes = [first._encode_with_my_categories(x)._codes for x in to_union]
new_codes = np.concatenate(all_codes)
if sort_categories and not ignore_order and ordered:
    raise TypeError("Cannot use sort_categories=True with ordered Categoricals")
Reported by Pylint.
Line: 284
Column: 22
categories = first.categories
ordered = first.ordered
all_codes = [first._encode_with_my_categories(x)._codes for x in to_union]
new_codes = np.concatenate(all_codes)
if sort_categories and not ignore_order and ordered:
    raise TypeError("Cannot use sort_categories=True with ordered Categoricals")
Reported by Pylint.
Line: 353
Column: 14
        # in Timestamp/Timedelta
        return _concatenate_2d([x.astype(object) for x in to_concat], axis=axis)
    result = type(to_concat[0])._concat_same_type(to_concat, axis=axis)
    return result
Reported by Pylint.
Line: 33
Column: 1
)
def cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike:
    """
    Helper function for `arr.astype(common_dtype)` but handling all special
    cases.
    """
    if is_dtype_equal(arr.dtype, dtype):
Reported by Pylint.
Line: 99
Column: 5
    """
    # filter empty arrays
    # 1-d dtypes always are included here
    def is_nonempty(x) -> bool:
        if x.ndim <= axis:
            return True
        return x.shape[axis] > 0
    # If all arrays are empty, there's nothing to convert, just short-cut to
Reported by Pylint.
Line: 118
Column: 17
kinds = {obj.dtype.kind for obj in to_concat}
contains_datetime = any(kind in ["m", "M"] for kind in kinds)
all_empty = not len(non_empties)
single_dtype = len({x.dtype for x in to_concat}) == 1
any_ea = any(isinstance(x.dtype, ExtensionDtype) for x in to_concat)
if contains_datetime:
    return _concat_datetime(to_concat, axis=axis)
Reported by Pylint.
Line: 132
Column: 9
    target_dtype = find_common_type([x.dtype for x in to_concat])
    to_concat = [cast_to_common_type(arr, target_dtype) for arr in to_concat]
    if isinstance(to_concat[0], ExtensionArray):
        cls = type(to_concat[0])
        return cls._concat_same_type(to_concat)
    else:
        return np.concatenate(to_concat)
Reported by Pylint.
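Several of the column positions in the concat.py findings above appear to land on leading-underscore attributes (for example `x._values` at 263:20 and the `_encode_with_my_categories(...)._codes` access at 284:22), which is the pattern Pylint's protected-access check flags; the message text is not included in this report. Where such internal access is deliberate, the usual suppression is an inline pragma. A minimal sketch, adapted from the `_maybe_unwrap` excerpt above and assuming protected-access is the rule involved:
from pandas import Categorical
from pandas.core.dtypes.generic import ABCCategoricalIndex, ABCSeries


def _maybe_unwrap(x):
    if isinstance(x, (ABCCategoricalIndex, ABCSeries)):
        # deliberate use of the private backing array
        return x._values  # pylint: disable=protected-access
    if isinstance(x, Categorical):
        return x
    raise TypeError("all components to combine must be Categorical")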
pandas/tests/groupby/test_groupby_shift_diff.py
18 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
    DataFrame,
    NaT,
    Series,
    Timedelta,
    Timestamp,
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
    DataFrame,
    NaT,
    Series,
    Timedelta,
    Timestamp,
Reported by Pylint.
Line: 14
Column: 1
import pandas._testing as tm
def test_group_shift_with_null_key():
    # This test is designed to replicate the segfault in issue #13813.
    n_rows = 1200
    # Generate a moderately large dataframe with occasional missing
    # values in column `B`, and then group by [`A`, `B`]. This should
Reported by Pylint.
Line: 22
Column: 5
    # values in column `B`, and then group by [`A`, `B`]. This should
    # force `-1` in `labels` array of `g.grouper.group_info` exactly
    # at those places, where the group-by key is partially missing.
    df = DataFrame(
        [(i % 12, i % 3 if i % 3 else np.nan, i) for i in range(n_rows)],
        dtype=float,
        columns=["A", "B", "Z"],
        index=None,
    )
Reported by Pylint.
Line: 28
Column: 5
        columns=["A", "B", "Z"],
        index=None,
    )
    g = df.groupby(["A", "B"])
    expected = DataFrame(
        [(i + 12 if i % 3 and i < n_rows - 12 else np.nan) for i in range(n_rows)],
        dtype=float,
        columns=["Z"],
Reported by Pylint.
Line: 41
Column: 1
    tm.assert_frame_equal(result, expected)
def test_group_shift_with_fill_value():
    # GH #24128
    n_rows = 24
    df = DataFrame(
        [(i % 12, i % 3, i) for i in range(n_rows)],
        dtype=float,
Reported by Pylint.
Line: 44
Column: 5
def test_group_shift_with_fill_value():
    # GH #24128
    n_rows = 24
    df = DataFrame(
        [(i % 12, i % 3, i) for i in range(n_rows)],
        dtype=float,
        columns=["A", "B", "Z"],
        index=None,
    )
Reported by Pylint.
Line: 50
Column: 5
        columns=["A", "B", "Z"],
        index=None,
    )
    g = df.groupby(["A", "B"])
    expected = DataFrame(
        [(i + 12 if i < n_rows - 12 else 0) for i in range(n_rows)],
        dtype=float,
        columns=["Z"],
Reported by Pylint.
Line: 63
Column: 1
    tm.assert_frame_equal(result, expected)
def test_group_shift_lose_timezone():
    # GH 30134
    now_dt = Timestamp.utcnow()
    df = DataFrame({"a": [1, 1], "date": now_dt})
    result = df.groupby("a").shift(0).iloc[0]
    expected = Series({"date": now_dt}, name=result.name)
Reported by Pylint.
Line: 66
Column: 5
def test_group_shift_lose_timezone():
    # GH 30134
    now_dt = Timestamp.utcnow()
    df = DataFrame({"a": [1, 1], "date": now_dt})
    result = df.groupby("a").shift(0).iloc[0]
    expected = Series({"date": now_dt}, name=result.name)
    tm.assert_series_equal(result, expected)
Reported by Pylint.
pandas/core/arrays/_arrow_utils.py
18 issues
Line: 4
Column: 1
import json
import numpy as np
import pyarrow
from pandas.core.arrays.interval import VALID_CLOSED
def pyarrow_array_to_numpy_and_mask(arr, dtype):
Reported by Pylint.
Line: 64
Column: 40
        return json.dumps(metadata).encode()
    @classmethod
    def __arrow_ext_deserialize__(cls, storage_type, serialized):
        metadata = json.loads(serialized.decode())
        return ArrowPeriodType(metadata["freq"])
    def __eq__(self, other):
        if isinstance(other, pyarrow.BaseExtensionType):
Reported by Pylint.
Line: 114
Column: 40
        return json.dumps(metadata).encode()
    @classmethod
    def __arrow_ext_deserialize__(cls, storage_type, serialized):
        metadata = json.loads(serialized.decode())
        subtype = pyarrow.type_for_alias(metadata["subtype"])
        closed = metadata["closed"]
        return ArrowIntervalType(subtype, closed)
Reported by Pylint.
Line: 1
Column: 1
import json
import numpy as np
import pyarrow
from pandas.core.arrays.interval import VALID_CLOSED
def pyarrow_array_to_numpy_and_mask(arr, dtype):
Reported by Pylint.
Line: 48
Column: 1
    return data, mask
class ArrowPeriodType(pyarrow.ExtensionType):
    def __init__(self, freq):
        # attributes need to be set first before calling
        # super init (as that calls serialize)
        self._freq = freq
        pyarrow.ExtensionType.__init__(self, pyarrow.int64(), "pandas.period")
Reported by Pylint.
Line: 56
Column: 5
        pyarrow.ExtensionType.__init__(self, pyarrow.int64(), "pandas.period")
    @property
    def freq(self):
        return self._freq
    def __arrow_ext_serialize__(self):
        metadata = {"freq": self.freq}
        return json.dumps(metadata).encode()
Reported by Pylint.
Line: 69
Column: 9
        return ArrowPeriodType(metadata["freq"])
    def __eq__(self, other):
        if isinstance(other, pyarrow.BaseExtensionType):
            return type(self) == type(other) and self.freq == other.freq
        else:
            return NotImplemented
    def __hash__(self):
Reported by Pylint.
Line: 70
Column: 20
    def __eq__(self, other):
        if isinstance(other, pyarrow.BaseExtensionType):
            return type(self) == type(other) and self.freq == other.freq
        else:
            return NotImplemented
    def __hash__(self):
        return hash((str(self), self.freq))
Reported by Pylint.
Line: 77
Column: 5
    def __hash__(self):
        return hash((str(self), self.freq))
    def to_pandas_dtype(self):
        import pandas as pd
        return pd.PeriodDtype(freq=self.freq)
Reported by Pylint.
Line: 78
Column: 9
        return hash((str(self), self.freq))
    def to_pandas_dtype(self):
        import pandas as pd
        return pd.PeriodDtype(freq=self.freq)
# register the type with a dummy instance
Reported by Pylint.
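For context on the ArrowPeriodType / ArrowIntervalType excerpts above: a pyarrow extension type subclasses pyarrow.ExtensionType, implements __arrow_ext_serialize__ and the __arrow_ext_deserialize__ classmethod, and is registered with pyarrow.register_extension_type. A minimal standalone sketch of that pattern (a toy type, not the pandas implementation):
import json

import pyarrow


class LabelType(pyarrow.ExtensionType):
    """Toy extension type that stores int64 values plus a `label` parameter."""

    def __init__(self, label):
        self._label = label
        pyarrow.ExtensionType.__init__(self, pyarrow.int64(), "example.label")

    def __arrow_ext_serialize__(self):
        return json.dumps({"label": self._label}).encode()

    @classmethod
    def __arrow_ext_deserialize__(cls, storage_type, serialized):
        metadata = json.loads(serialized.decode())
        return cls(metadata["label"])


# register a dummy instance so serialized data can be reconstructed on read
pyarrow.register_extension_type(LabelType("example"))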
pandas/tests/config/test_localization.py
18 issues
Line: 5
Column: 1
import locale
import os
import pytest
from pandas._config.localization import (
    can_set_locale,
    get_locales,
    set_locale,
Reported by Pylint.
Line: 1
Column: 1
import codecs
import locale
import os
import pytest
from pandas._config.localization import (
    can_set_locale,
    get_locales,
Reported by Pylint.
Line: 30
Column: 1
)
def test_can_set_locale_valid_set():
    # Can set the default locale.
    assert can_set_locale("")
def test_can_set_locale_invalid_set():
Reported by Pylint.
Line: 32
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_can_set_locale_valid_set():
    # Can set the default locale.
    assert can_set_locale("")
def test_can_set_locale_invalid_set():
    # Cannot set an invalid locale.
    assert not can_set_locale("non-existent_locale")
Reported by Bandit.
Line: 35
Column: 1
    assert can_set_locale("")
def test_can_set_locale_invalid_set():
    # Cannot set an invalid locale.
    assert not can_set_locale("non-existent_locale")
def test_can_set_locale_invalid_get(monkeypatch):
Reported by Pylint.
Line: 37
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
def test_can_set_locale_invalid_set():
    # Cannot set an invalid locale.
    assert not can_set_locale("non-existent_locale")
def test_can_set_locale_invalid_get(monkeypatch):
    # see GH#22129
    # In some cases, an invalid locale can be set,
Reported by Bandit.
Line: 40
Column: 1
    assert not can_set_locale("non-existent_locale")
def test_can_set_locale_invalid_get(monkeypatch):
    # see GH#22129
    # In some cases, an invalid locale can be set,
    # but a subsequent getlocale() raises a ValueError.
    def mock_get_locale():
Reported by Pylint.
Line: 48
Column: 35
    def mock_get_locale():
        raise ValueError()
    with monkeypatch.context() as m:
        m.setattr(locale, "getlocale", mock_get_locale)
        assert not can_set_locale("")
def test_get_locales_at_least_one():
Reported by Pylint.
Line: 50
Suggestion:
https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html
    with monkeypatch.context() as m:
        m.setattr(locale, "getlocale", mock_get_locale)
        assert not can_set_locale("")
def test_get_locales_at_least_one():
    # see GH#9744
    assert len(_all_locales) > 0
Reported by Bandit.
Line: 53
Column: 1
        assert not can_set_locale("")
def test_get_locales_at_least_one():
    # see GH#9744
    assert len(_all_locales) > 0
@_skip_if_only_one_locale
Reported by Pylint.
pandas/tests/indexes/interval/test_base.py
17 issues
Line: 2
Column: 1
import numpy as np
import pytest
from pandas import (
    IntervalIndex,
    Series,
    date_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 23
Column: 16
    @pytest.fixture
    def simple_index(self) -> IntervalIndex:
        return self._index_cls.from_breaks(range(11), closed="right")
    @pytest.fixture
    def index(self):
        return tm.makeIntervalIndex(10)
Reported by Pylint.
Line: 29
Column: 5
    def index(self):
        return tm.makeIntervalIndex(10)
    def create_index(self, *, closed="right"):
        return IntervalIndex.from_breaks(range(11), closed=closed)
    def test_repr_max_seq_item_setting(self):
        # override base test: not a valid repr as we use interval notation
        pass
Reported by Pylint.
Line: 32
Column: 5
    def create_index(self, *, closed="right"):
        return IntervalIndex.from_breaks(range(11), closed=closed)
    def test_repr_max_seq_item_setting(self):
        # override base test: not a valid repr as we use interval notation
        pass
    def test_repr_roundtrip(self):
        # override base test: not a valid repr as we use interval notation
Reported by Pylint.
Line: 36
Column: 5
        # override base test: not a valid repr as we use interval notation
        pass
    def test_repr_roundtrip(self):
        # override base test: not a valid repr as we use interval notation
        pass
    def test_take(self, closed):
        index = self.create_index(closed=closed)
Reported by Pylint.
Line: 51
Column: 5
        tm.assert_index_equal(result, expected)
    @pytest.mark.parametrize("klass", [list, tuple, np.array, Series])
    def test_where(self, simple_index, klass):
        idx = simple_index
        cond = [True] * len(idx)
        expected = idx
        result = expected.where(klass(cond))
        tm.assert_index_equal(result, expected)
Reported by Pylint.
Line: 68
Column: 17
        idx = simple_index
        with pytest.raises(ValueError, match="multi-dimensional indexing not allowed"):
            with tm.assert_produces_warning(FutureWarning):
                idx[:, None]
class TestPutmask:
    @pytest.mark.parametrize("tz", ["US/Pacific", None])
    def test_putmask_dt64(self, tz):
Reported by Pylint.
Line: 1
Column: 1
import numpy as np
import pytest
from pandas import (
    IntervalIndex,
    Series,
    date_range,
)
import pandas._testing as tm
Reported by Pylint.
Line: 26
Column: 5
        return self._index_cls.from_breaks(range(11), closed="right")
    @pytest.fixture
    def index(self):
        return tm.makeIntervalIndex(10)
    def create_index(self, *, closed="right"):
        return IntervalIndex.from_breaks(range(11), closed=closed)
Reported by Pylint.
Line: 26
Column: 5
        return self._index_cls.from_breaks(range(11), closed="right")
    @pytest.fixture
    def index(self):
        return tm.makeIntervalIndex(10)
    def create_index(self, *, closed="right"):
        return IntervalIndex.from_breaks(range(11), closed=closed)
Reported by Pylint.