Weights being processed by CRPS
tennlee committed Aug 23, 2023
1 parent 7499116 commit 61b16f7
Showing 3 changed files with 8 additions and 18 deletions.
11 changes: 5 additions & 6 deletions src/scores/probability/crps_impl.py
@@ -50,7 +50,7 @@ def check_crps_cdf_inputs(
raise ValueError("Dimensions of `threshold_weight` must be a subset of dimensions of `fcst`")

if dims is not None and not set(dims).issubset(fcst.dims):
raise ValueError("`dims` must be a subset of `fcst` dimensions")
raise ValueError("`dims` must be a subset of `fcst` dimensions") # pragma: no cover

if fcst_fill_method not in ["linear", "step", "forward", "backward"]:
raise ValueError("`fcst_fill_method` must be 'linear', 'step', 'forward' or 'backward'")
@@ -289,9 +289,6 @@ def crps_cdf(
Journal of Business & Economic Statistics, 29(3), 411–422. http://www.jstor.org/stable/23243806
"""

if weights is not None:
raise NotImplementedError("Weights handling is coming soon")

dims = scores.utils.gather_dimensions(
fcst.dims,
obs.dims,
@@ -344,8 +341,10 @@ def crps_cdf(
include_components=include_components,
)

weighted = scores.functions.apply_weights(result, weights)

dims_to_collapse = scores.utils.dims_complement(result, dims=dims)
result = result.mean(dim=dims_to_collapse)
result = weighted.mean(dim=dims_to_collapse)

return result

@@ -540,7 +539,7 @@ def check_crps_cdf_brier_inputs(fcst, obs, threshold_dim, fcst_fill_method, dims
raise ValueError("Dimensions of `obs` must be a subset of dimensions of `fcst`")

if dims is not None and not set(dims).issubset(fcst.dims):
raise ValueError("`dims` must be a subset of `fcst` dimensions")
raise ValueError("`dims` must be a subset of `fcst` dimensions") # pragma: no cover

if fcst_fill_method not in ["linear", "step", "forward", "backward"]:
raise ValueError("`fcst_fill_method` must be 'linear', 'step', 'forward' or 'backward'")
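For readers following the change: the `weights` argument, previously rejected with a NotImplementedError, is now passed through scores.functions.apply_weights before the final reduction. The sketch below mimics that reduction step with plain xarray. It is an illustration only: apply_weights is assumed to act like a broadcast multiply (with None meaning "no weighting"), and the dimension names lat/lon are invented for the example rather than taken from the library.

import numpy as np
import xarray as xr

# Stand-in for the per-point CRPS values produced before the reduction step.
lat = np.array([-60.0, 0.0, 60.0])
lon = np.array([0.0, 90.0, 180.0, 270.0])
result = xr.DataArray(
    np.random.rand(len(lat), len(lon)),
    coords={"lat": lat, "lon": lon},
    dims=["lat", "lon"],
)

# Assumed behaviour of apply_weights: broadcast-multiply the result by the weights.
weights = xr.DataArray(np.cos(np.deg2rad(lat)), coords={"lat": lat}, dims=["lat"])
weighted = result * weights

# Mirror of the new reduction: collapse every dimension the caller did not
# ask to keep (here we keep "lon"), then average.
dims_to_keep = ["lon"]
dims_to_collapse = [d for d in weighted.dims if d not in dims_to_keep]
reduced = weighted.mean(dim=dims_to_collapse)
print(reduced)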
6 changes: 3 additions & 3 deletions src/scores/sample_data.py
@@ -34,9 +34,9 @@ def continuous_observations(large_size: bool = False) -> xr.DataArray:
periods = 10

if large_size: # pragma: no cover
num_lats = 364 # pragma: no cover
num_lons = 720 # pragma: no cover
periods = 240 # pragma: no cover
num_lats = 364 # pragma: no cover - used in notebooks and tested manually
num_lons = 720 # pragma: no cover - used in notebooks and tested manually
periods = 240 # pragma: no cover - used in notebooks and tested manually

lat = np.linspace(-90, 90, num_lats)
lon = np.linspace(0, 360, num_lons)
9 changes: 0 additions & 9 deletions src/scores/utils.py
@@ -208,12 +208,3 @@ def check_dims(xr_data, expected_dims, mode=None):
)


def create_latitude_weights(latitudes):
'''
A common way of weighting errors is to make them proportional to the amount of area
which is contained in a particular region. This is approximated by the cosine
of the latitude on an LLXY grid. Nuances not accounted for include the variation in
latitude across the region, or the irregularity of the surface of the earth.
'''
weights = np.cos(np.deg2rad(latitudes))
return weights
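The removed create_latitude_weights helper computed cosine-of-latitude area weights. With weights now flowing through crps_cdf, callers can build those weights themselves and pass them in. A minimal sketch, assuming only what the diff shows (np.cos(np.deg2rad(...)) for the weights and a `weights` keyword on crps_cdf); any other argument names are hypothetical:

import numpy as np
import xarray as xr

# Cosine-of-latitude area weights, as the removed helper computed them.
latitudes = xr.DataArray(np.linspace(-90, 90, 19), dims=["latitude"])
lat_weights = np.cos(np.deg2rad(latitudes))

# Hypothetical call into the updated API; argument names other than
# `weights` are assumptions, not confirmed by this commit.
# crps = scores.probability.crps_cdf(fcst, obs, threshold_dim="threshold", weights=lat_weights)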
