Commit: Update some references

stinodego committed May 25, 2024
1 parent c4f68db commit abb59a3

Showing 8 changed files with 15 additions and 12 deletions.
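
The change is mechanical throughout: comparisons against null_count() are replaced with the boolean has_nulls() helper. As a rough standalone sketch of the equivalence the commit relies on (not part of the diff; assumes a Polars version that provides Series.has_nulls):

import polars as pl

s = pl.Series("a", [1, 2, None])

# has_nulls() is the boolean form of the null_count() comparisons being replaced.
assert s.has_nulls() == (s.null_count() > 0)
assert (not s.has_nulls()) == (s.null_count() == 0)

# After dropping nulls, both spellings agree that none remain.
assert not s.drop_nulls().has_nulls()
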
2 changes: 1 addition & 1 deletion py-polars/polars/dataframe/frame.py

@@ -1657,7 +1657,7 @@ def to_numpy(
 else:
     arr = s.to_numpy(use_pyarrow=use_pyarrow)
 
-if s.dtype == String and s.null_count() == 0:
+if s.dtype == String and not s.has_nulls():
     arr = arr.astype(str, copy=False)
 arrays.append(arr)
 struct_dtype.append((s.name, arr.dtype))

2 changes: 1 addition & 1 deletion py-polars/polars/datatypes/classes.py

@@ -599,7 +599,7 @@ def __init__(self, categories: Series | Iterable[str]):
     self.categories = pl.Series(name="category", dtype=String)
     return
 
-if categories.null_count() > 0:
+if categories.has_nulls():
     msg = "Enum categories must not contain null values"
     raise TypeError(msg)

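
For illustration, a minimal sketch of the behaviour guarded by this check (not part of the commit; assumes the same Polars API as above):

import polars as pl

# Valid Enum: categories without nulls.
dtype = pl.Enum(["a", "b", "c"])

# A null category triggers the TypeError raised above.
try:
    pl.Enum(pl.Series(["a", None]))
    raise AssertionError("expected TypeError for null categories")
except TypeError as exc:
    assert "must not contain null values" in str(exc)
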
8 changes: 4 additions & 4 deletions py-polars/polars/series/series.py

@@ -1215,7 +1215,7 @@ def __deepcopy__(self, memo: None = None) -> Self:
 
 def __contains__(self, item: Any) -> bool:
     if item is None:
-        return self.null_count() > 0
+        return self.has_nulls()
     return self.implode().list.contains(item).item()
 
 def __iter__(self) -> Generator[Any, None, None]:

@@ -1325,7 +1325,7 @@ def __getitem__(
     not item or (isinstance(item[0], int) and not isinstance(item[0], bool))  # type: ignore[redundant-expr]
 ):
     idx_series = Series("", item, dtype=Int64)._pos_idxs(self.len())
-    if idx_series.null_count() > 0:
+    if idx_series.has_nulls():
         msg = "cannot use `__getitem__` with index values containing nulls"
         raise ValueError(msg)
     return self._take_with_series(idx_series)

@@ -1398,7 +1398,7 @@ def __array__(
 # Cast String types to fixed-length string to support string ufuncs
 # TODO: Use variable-length strings instead when NumPy 2.0.0 comes out:
 # https://numpy.org/devdocs/reference/routines.dtypes.html#numpy.dtypes.StringDType
-if dtype is None and self.null_count() == 0 and self.dtype == String:
+if dtype is None and not self.has_nulls() and self.dtype == String:
     dtype = np.dtype("U")
 
 if copy is None:

@@ -1479,7 +1479,7 @@ def __array_ufunc__(
 if is_generalized_ufunc:
     # Generalized ufuncs will operate on the whole array, so
     # missing data can corrupt the results.
-    if self.null_count() > 0:
+    if self.has_nulls():
         msg = "Can't pass a Series with missing data to a generalized ufunc, as it might give unexpected results. See https://docs.pola.rs/user-guide/expressions/missing-data/ for suggestions on how to remove or fill in missing data."
         raise ComputeError(msg)
 # If the input and output are the same size, e.g. "(n)->(n)" we

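
A small usage sketch of the __contains__ behaviour changed above (illustrative only; assumes the same Polars API):

import polars as pl

s = pl.Series("a", [1, 2, None])

# Membership testing with None reports whether the Series contains any nulls,
# i.e. it is equivalent to s.has_nulls().
assert None in s
assert not (None in pl.Series("b", [1, 2, 3]))

# Other values go through implode().list.contains(...).
assert 2 in s
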
2 changes: 2 additions & 0 deletions py-polars/tests/unit/interop/numpy/test_to_numpy_df.py

@@ -66,10 +66,12 @@ def test_to_numpy(order: IndexOrder, f_contiguous: bool, c_contiguous: bool) ->
 
     # check string conversion; if no nulls can optimise as a fixed-width dtype
     df = pl.DataFrame({"s": ["x", "y", None]})
+    assert df["s"].has_nulls()
     assert_array_equal(
         df.to_numpy(structured=True),
         np.array([("x",), ("y",), (None,)], dtype=[("s", "O")]),
     )
+    assert not df["s"][:2].has_nulls()
     assert_array_equal(
         df[:2].to_numpy(structured=True),
         np.array([("x",), ("y",)], dtype=[("s", "<U1")]),
     )

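
For reference, a standalone sketch of the behaviour this test exercises: with nulls present the structured conversion keeps an object dtype, and without nulls the String column becomes a fixed-width NumPy string (illustrative, mirroring the assertions above):

import numpy as np
import polars as pl

df = pl.DataFrame({"s": ["x", "y", None]})

# Null present: the "s" field stays an object dtype.
assert df["s"].has_nulls()
assert df.to_numpy(structured=True).dtype == np.dtype([("s", "O")])

# No nulls: the "s" field is converted to a fixed-width string dtype.
assert not df["s"][:2].has_nulls()
assert df[:2].to_numpy(structured=True).dtype == np.dtype([("s", "<U1")])
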
1 change: 1 addition & 0 deletions py-polars/tests/unit/interop/numpy/test_to_numpy_series.py

@@ -473,6 +473,7 @@ def test_view() -> None:
 
 def test_view_nulls() -> None:
     s = pl.Series("b", [1, 2, None])
+    assert s.has_nulls()
     with pytest.deprecated_call(), pytest.raises(AssertionError):
         s.view()
 

2 changes: 1 addition & 1 deletion py-polars/tests/unit/operations/map/test_map_elements.py

@@ -151,7 +151,7 @@ def test_map_elements_type_propagation() -> None:
     .group_by("a", maintain_order=True)
     .agg(
         [
-            pl.when(pl.col("b").null_count() == 0)
+            pl.when(~pl.col("b").has_nulls())
             .then(
                 pl.col("b").map_elements(
                     lambda s: s[0]["c"],

4 changes: 2 additions & 2 deletions py-polars/tests/unit/operations/test_shift.py

@@ -47,11 +47,11 @@ def test_shift_and_fill() -> None:
     out = ldf.with_columns(
         pl.col("a").shift(n=-2, fill_value=pl.col("b").mean())
     ).collect()
-    assert out["a"].null_count() == 0
+    assert out["a"].has_nulls() is False
 
     # use df method
     out = ldf.shift(n=2, fill_value=pl.col("b").std()).collect()
-    assert out["a"].null_count() == 0
+    assert out["a"].has_nulls() is False
 
 
 def test_shift_expr() -> None:

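
As a quick illustration of why these assertions hold: shift introduces nulls at the boundary unless fill_value supplies a replacement (a sketch with hypothetical data, not taken from the test file):

import polars as pl

ldf = pl.LazyFrame({"a": [1.0, 2.0, 3.0], "b": [4.0, 5.0, 6.0]})

# Without fill_value, the shifted positions become null.
plain = ldf.with_columns(pl.col("a").shift(n=2)).collect()
assert plain["a"].has_nulls()

# With fill_value, the gap is filled and no nulls remain.
filled = ldf.with_columns(pl.col("a").shift(n=2, fill_value=pl.col("b").mean())).collect()
assert filled["a"].has_nulls() is False
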
@@ -59,7 +59,7 @@ def test_series_size_range(s: pl.Series) -> None:
 
 @given(s=series(allow_null=False))
 def test_series_allow_null_false(s: pl.Series) -> None:
-    assert s.null_count() == 0
+    assert s.has_nulls() is False
     assert s.dtype != pl.Null
 

@@ -71,7 +71,7 @@ def test_series_allow_null_allowed_dtypes(s: pl.Series) -> None:
 @given(s=series(allowed_dtypes=[pl.List(pl.Int8)], allow_null=False))
 def test_series_allow_null_nested(s: pl.Series) -> None:
     for v in s:
-        assert v.null_count() == 0
+        assert v.has_nulls() is False
 
 
 @given(df=dataframes())

@@ -123,7 +123,7 @@ def test_dataframes_allow_null_column(df: pl.DataFrame) -> None:
     )
 )
 def test_dataframes_allow_null_override(df: pl.DataFrame) -> None:
-    assert df.get_column("col0").null_count() == 0
+    assert df.get_column("col0").has_nulls() is False
     assert 0 <= df.get_column("colx").null_count() <= df.height

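
A minimal sketch of how these hypothesis strategies are typically used, assuming the series strategy is imported from polars.testing.parametric as in the tests above:

from hypothesis import given

import polars as pl
from polars.testing.parametric import series


@given(s=series(allow_null=False))
def test_generated_series_has_no_nulls(s: pl.Series) -> None:
    # allow_null=False means the strategy should never draw null values.
    assert s.has_nulls() is False
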
