Deprecate allow_infinities
stinodego committed May 13, 2024
1 parent 7ff4fcd commit b639211
Showing 2 changed files with 68 additions and 23 deletions.
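The user-facing effect of this commit, in short: passing `allow_infinities` to the parametric strategies still works but now emits a DeprecationWarning, and the value is forwarded to the underlying data strategies under the new name `allow_infinity` via `**kwargs`. A minimal migration sketch, assuming the strategies are imported from `polars.testing.parametric` as in the test suite below:

import polars as pl
from polars.testing.parametric import dataframes, series

# Deprecated spelling: still accepted, but emits a DeprecationWarning.
old_s = series(dtype=pl.Float64, allow_infinities=False)

# New spelling: forwarded through **kwargs to the underlying data strategies.
new_s = series(dtype=pl.Float64, allow_infinity=False)
new_df = dataframes(
    allowed_dtypes=[pl.Float32, pl.Float64], max_cols=4, allow_infinity=False
)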
68 changes: 49 additions & 19 deletions py-polars/polars/testing/parametric/strategies/core.py
@@ -31,6 +31,7 @@
@st.composite
def series( # noqa: D417
draw: DrawFn,
/,
*,
name: str | SearchStrategy[str] | None = None,
dtype: PolarsDataType | None = None,
@@ -39,12 +40,11 @@ def series( # noqa: D417
max_size: int = _ROW_LIMIT,
strategy: SearchStrategy[Any] | None = None,
allow_null: bool = False,
allow_infinities: bool = True,
unique: bool = False,
chunked: bool | None = None,
allowed_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
excluded_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
null_probability: float | None = None,
**kwargs: Any,
) -> Series:
"""
Hypothesis strategy for producing polars Series.
@@ -69,8 +69,6 @@
supports overriding the default strategy for the given dtype.
allow_null : bool
Allow nulls as possible values.
allow_infinities : bool, optional
optionally disallow generation of +/-inf values for floating-point dtypes.
unique : bool, optional
indicate whether Series values should all be distinct.
chunked : bool, optional
@@ -80,6 +78,10 @@
when automatically generating Series data, allow only these dtypes.
excluded_dtypes : {list,set}, optional
when automatically generating Series data, exclude these dtypes.
**kwargs
Additional keyword arguments that are passed to the underlying data generation
strategies.
null_probability : float
Percentage chance (expressed between 0.0 => 1.0) that any Series value is null.
This is applied independently of any None values generated by the underlying
@@ -88,6 +90,12 @@
.. deprecated:: 0.20.26
Use `allow_null` instead.
allow_infinities : bool, optional
Allow generation of +/-inf values for floating-point dtypes.
.. deprecated:: 0.20.26
Use `allow_infinity` instead.
Notes
-----
In actual usage this is deployed as a unit test decorator, providing a strategy
@@ -121,8 +129,14 @@ def series( # noqa: D417
["xx"]
]
"""
if null_probability is not None:
allow_null = _handle_null_probability_deprecation(null_probability) # type: ignore[assignment]
if (null_prob := kwargs.pop("null_probability", None)) is not None:
allow_null = _handle_null_probability_deprecation(null_prob) # type: ignore[assignment]
if (allow_inf := kwargs.pop("allow_infinities", None)) is not None:
issue_deprecation_warning(
"`allow_infinities` is deprecated. Use `allow_infinity` instead.",
version="0.20.26",
)
kwargs["allow_infinity"] = allow_inf

if isinstance(allowed_dtypes, (DataType, DataTypeClass)):
allowed_dtypes = [allowed_dtypes]
@@ -160,8 +174,8 @@ def series( # noqa: D417
if strategy is None:
strategy = data(
dtype, # type: ignore[arg-type]
allow_infinity=allow_infinities,
allow_null=allow_null,
**kwargs,
)

values = draw(
@@ -200,10 +214,9 @@ def dataframes(
chunked: bool | None = None,
include_cols: Sequence[column] | column | None = None,
allow_null: bool | Mapping[str, bool] = False,
allow_infinities: bool = True,
allowed_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
excluded_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
null_probability: float | dict[str, float] | None = None,
**kwargs: Any,
) -> SearchStrategy[DataFrame]: ...


@@ -220,16 +233,16 @@ def dataframes(
chunked: bool | None = None,
include_cols: Sequence[column] | column | None = None,
allow_null: bool | Mapping[str, bool] = False,
allow_infinities: bool = True,
allowed_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
excluded_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
null_probability: float | dict[str, float] | None = None,
**kwargs: Any,
) -> SearchStrategy[LazyFrame]: ...


@st.composite
def dataframes( # noqa: D417
draw: DrawFn,
/,
cols: int | column | Sequence[column] | None = None,
*,
lazy: bool = False,
@@ -241,10 +254,9 @@ def dataframes( # noqa: D417
chunked: bool | None = None,
include_cols: Sequence[column] | column | None = None,
allow_null: bool | Mapping[str, bool] = False,
allow_infinities: bool = True,
allowed_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
excluded_dtypes: Collection[PolarsDataType] | PolarsDataType | None = None,
null_probability: float | dict[str, float] | None = None,
**kwargs: Any,
) -> DataFrame | LazyFrame:
"""
Hypothesis strategy for producing polars DataFrames or LazyFrames.
@@ -279,19 +291,30 @@ def dataframes( # noqa: D417
(if any present).
allow_null : bool or Mapping[str, bool]
Allow nulls as possible values.
allow_infinities : bool, optional
optionally disallow generation of +/-inf values for floating-point dtypes.
allowed_dtypes : {list,set}, optional
when automatically generating data, allow only these dtypes.
excluded_dtypes : {list,set}, optional
when automatically generating data, exclude these dtypes.
**kwargs
Additional keyword arguments that are passed to the underlying data generation
strategies.
null_probability : {float, dict[str,float]}, optional
percentage chance (expressed between 0.0 => 1.0) that a generated value is
None. this is applied independently of any None values generated by the
underlying strategy, and can be applied either on a per-column basis (if
given as a `{col:pct}` dict), or globally. if null_probability is defined
on a column, it takes precedence over the global value.
.. deprecated:: 0.20.26
Use `allow_null` instead.
allow_infinities : bool, optional
optionally disallow generation of +/-inf values for floating-point dtypes.
.. deprecated:: 0.20.26
Use `allow_infinity` instead.
Notes
-----
In actual usage this is deployed as a unit test decorator, providing a strategy
@@ -347,8 +370,14 @@ def dataframes( # noqa: D417
│ 575050513 ┆ NaN │
└───────────┴────────────┘
"""
if null_probability is not None:
allow_null = _handle_null_probability_deprecation(null_probability)
if (null_prob := kwargs.pop("null_probability", None)) is not None:
allow_null = _handle_null_probability_deprecation(null_prob)
if (allow_inf := kwargs.pop("allow_infinities", None)) is not None:
issue_deprecation_warning(
"`allow_infinities` is deprecated. Use `allow_infinity` instead.",
version="0.20.26",
)
kwargs["allow_infinity"] = allow_inf

if isinstance(include_cols, column):
include_cols = [include_cols]
@@ -389,12 +418,12 @@ def dataframes( # noqa: D417
dtype=c.dtype,
size=size,
allow_null=c.allow_null, # type: ignore[arg-type]
allow_infinities=allow_infinities,
strategy=c.strategy,
unique=c.unique,
chunked=None if chunked is None else False,
allowed_dtypes=allowed_dtypes,
excluded_dtypes=excluded_dtypes,
**kwargs,
)
)
for c in cols
@@ -416,7 +445,7 @@
@dataclass
class column:
"""
Define a column for use with the @dataframes strategy.
Define a column for use with the `dataframes` strategy.
Parameters
----------
@@ -430,6 +459,7 @@ class column:
Allow nulls as possible values.
unique : bool, optional
flag indicating that all values generated for the column should be unique.
null_probability : float, optional
percentage chance (expressed between 0.0 => 1.0) that a generated value is
None. this is applied independently of any None values generated by the
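Both strategies handle the deprecation with the same keyword-remapping pattern: pop the old name from `**kwargs`, warn, and re-insert the value under the new name. A self-contained sketch of that pattern, using the standard `warnings` module in place of the polars-internal `issue_deprecation_warning` helper:

import warnings
from typing import Any


def remap_deprecated_kwarg(
    kwargs: dict[str, Any], old: str, new: str, version: str
) -> None:
    # Pop the deprecated keyword (if given), warn, and forward it under the new name.
    if (value := kwargs.pop(old, None)) is not None:
        warnings.warn(
            f"`{old}` is deprecated. Use `{new}` instead (deprecated in {version}).",
            DeprecationWarning,
            stacklevel=3,
        )
        kwargs[new] = value


kwargs: dict[str, Any] = {"allow_infinities": False}
remap_deprecated_kwarg(kwargs, "allow_infinities", "allow_infinity", "0.20.26")
assert kwargs == {"allow_infinity": False}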
23 changes: 19 additions & 4 deletions py-polars/tests/parametric/test_testing.py
@@ -169,11 +169,9 @@ def test_chunking(

@given(
df=dataframes(
allowed_dtypes=[pl.Float32, pl.Float64],
allow_infinities=False,
max_cols=4,
allowed_dtypes=[pl.Float32, pl.Float64], max_cols=4, allow_infinity=False
),
s=series(dtype=pl.Float64, allow_infinities=False),
s=series(dtype=pl.Float64, allow_infinity=False),
)
def test_infinities(
df: pl.DataFrame,
@@ -226,3 +224,20 @@ def test_sequence_strategies(df: pl.DataFrame) -> None:
def test_column_invalid_probability() -> None:
with pytest.deprecated_call(), pytest.raises(InvalidArgument):
column("col", null_probability=2.0)


@pytest.mark.hypothesis()
def test_column_null_probability_deprecated() -> None:
with pytest.deprecated_call():
col = column("col", allow_null=False, null_probability=0.5)
assert col.null_probability == 0.5
assert col.allow_null is True # null_probability takes precedence


@given(st.data())
def test_allow_infinities_deprecated(data: st.DataObject) -> None:
with pytest.deprecated_call():
strategy = series(dtype=pl.Float64, allow_infinities=False)
s = data.draw(strategy)

assert all(v not in (float("inf"), float("-inf")) for v in s)

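As the docstrings note, these strategies are typically deployed as Hypothesis test decorators. A short usage sketch with the replacement keyword; the assertion is illustrative and mirrors `test_infinities` above:

from hypothesis import given

import polars as pl
from polars.testing.parametric import series


@given(s=series(dtype=pl.Float64, allow_infinity=False))
def test_float_series_has_no_infinities(s: pl.Series) -> None:
    # With allow_infinity=False, no +inf or -inf values should be generated.
    assert not s.is_infinite().any()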