diff --git a/semeio/communication/reporter.py b/semeio/communication/reporter.py
index 7c6cda1c..dc0569de 100644
--- a/semeio/communication/reporter.py
+++ b/semeio/communication/reporter.py
@@ -26,7 +26,7 @@ def publish(self, namespace, data):
         all_data.append(data)

         with open(output_file, "w", encoding="utf-8") as f_handle:
-            json.dump(all_data, f_handle)
+            json.dump(all_data, f_handle, default=str)

     def publish_msg(self, namespace, msg):
         output_file = self._prepare_output_file(namespace)
diff --git a/semeio/workflows/correlated_observations_scaling/job_config.py b/semeio/workflows/correlated_observations_scaling/job_config.py
index 3c9edb54..ae06a45d 100644
--- a/semeio/workflows/correlated_observations_scaling/job_config.py
+++ b/semeio/workflows/correlated_observations_scaling/job_config.py
@@ -1,4 +1,5 @@
 from copy import deepcopy
+from datetime import datetime

 import configsuite
 from configsuite import MetaKeys as MK
@@ -21,10 +22,12 @@ def _min_length(value):

 @configsuite.validator_msg("Minimum value of index must be >= 0")
 def _min_value(value):
-    return value >= 0
+    if isinstance(value, int):
+        return value >= 0
+    return True


-_NUM_CONVERT_MSG = "Will go through the input and try to convert to list of int"
+_NUM_CONVERT_MSG = "Will go through the input and try to convert to list"


 @configsuite.transformation_msg(_NUM_CONVERT_MSG)
@@ -33,6 +36,8 @@ def _to_int_list(value):
     if isinstance(value, int):
         return [value]
     if isinstance(value, (list, tuple)):
+        if all(isinstance(val, datetime) for val in value):
+            return value
         value = ",".join([str(x) for x in value])
     return _realize_list(value)

@@ -146,6 +151,15 @@ def _CALCULATE_KEYS_key_not_empty_list(content):
 on "FOPR", but only update the scaling on indices "50-100".
 """

+
+@configsuite.validator_msg("int or datetime")
+def _is_int_or_datetime(value):
+    return isinstance(value, (datetime, int))
+
+
+IntOrDate = configsuite.BasicType("int_or_datetime", _is_int_or_datetime)
+
+
 _KEYS_SCHEMA = {
     MK.ElementValidators: (_CALCULATE_KEYS_key_not_empty_list,),
     MK.Type: types.List,
@@ -167,7 +181,7 @@ def _CALCULATE_KEYS_key_not_empty_list(content):
                     "(1,2,4-6,14-15) ->[1, 2, 4, 5, 6, 14, 15]",
                     MK.Content: {
                         MK.Item: {
-                            MK.Type: types.Integer,
+                            MK.Type: IntOrDate,
                             MK.ElementValidators: (_min_value,),
                         }
                     },
diff --git a/semeio/workflows/correlated_observations_scaling/update_scaling.py b/semeio/workflows/correlated_observations_scaling/update_scaling.py
index 2b752dd0..7fb8e2c3 100644
--- a/semeio/workflows/correlated_observations_scaling/update_scaling.py
+++ b/semeio/workflows/correlated_observations_scaling/update_scaling.py
@@ -26,13 +26,11 @@ def _update_scaling(obs, scale_factor, obs_list):
     for event in obs_list:
         obs_vector = obs[event.key]
         index_list = (
-            event.index
-            if event.index
-            else [x - 1 for x in obs_vector.observations.keys()]
+            event.index if event.index else list(obs_vector.observations.keys())
         )
         for step, obs_node in obs_vector.observations.items():
             if obs_vector.observation_type.name == "SUMMARY_OBS":
-                if step - 1 in index_list:
+                if step in index_list:
                     obs_node.std_scaling = scale_factor
             else:
                 obs_node.std_scaling[event.active_list] = scale_factor
diff --git a/semeio/workflows/spearman_correlation_job/job.py b/semeio/workflows/spearman_correlation_job/job.py
index 7167bc74..60f7a625 100644
--- a/semeio/workflows/spearman_correlation_job/job.py
+++ b/semeio/workflows/spearman_correlation_job/job.py
@@ -37,7 +37,7 @@ def spearman_job(
         zip(
             clusters,
             columns.get_level_values(0),
-            columns.get_level_values("data_index"),
+            columns.get_level_values("key_index"),
         )
     )

diff --git a/setup.py b/setup.py
index 5cf0c220..b6bff9b2 100755
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@
     ],
     install_requires=[
         "ecl",
-        "ert>=5.1.0-b8",
+        "ert>=6.0.0-rc0",
         "configsuite>=0.6",
         "numpy",
         "pandas>1.3.0",
diff --git a/tests/communication/unit/test_file_reporter.py b/tests/communication/unit/test_file_reporter.py
index 071ffbe2..becc2c62 100644
--- a/tests/communication/unit/test_file_reporter.py
+++ b/tests/communication/unit/test_file_reporter.py
@@ -146,17 +146,6 @@ def test_file_reporter_publish_valid_json(data, tmpdir):
     assert loaded_data == [data]


-def test_file_reporter_publish_invalid_json(tmpdir):
-    tmpdir.chdir()
-    namespace = "data"
-    data = json  # The json module is not JSON serializable...
-
-    reporter = FileReporter(os.getcwd())
-
-    with pytest.raises(TypeError):
-        reporter.publish(namespace, data)
-
-
 def test_file_reporter_publish_multiple_json(tmpdir):
     tmpdir.chdir()
     namespace = "some_data"
diff --git a/tests/jobs/correlated_observations_scaling/test_integration.py b/tests/jobs/correlated_observations_scaling/test_integration.py
index f377cd1e..2fdc08bd 100644
--- a/tests/jobs/correlated_observations_scaling/test_integration.py
+++ b/tests/jobs/correlated_observations_scaling/test_integration.py
@@ -1,5 +1,6 @@
 import json
 import os
+from datetime import datetime
 from unittest.mock import MagicMock

 import numpy as np
@@ -144,7 +145,19 @@ def test_main_entry_point_history_data_calc(snake_oil_facade, config, expected_r


 def test_main_entry_point_history_data_calc_subset(snake_oil_facade):
-    config = {"CALCULATE_KEYS": {"keys": [{"key": "FOPR", "index": [10, 20]}]}}
+    config = {
+        "CALCULATE_KEYS": {
+            "keys": [
+                {
+                    "key": "FOPR",
+                    "index": [
+                        datetime(2010, 4, 20),
+                        datetime(2010, 7, 29),
+                    ],
+                }
+            ]
+        }
+    }
     obs = snake_oil_facade.get_observations()
     obs_vector = obs["FOPR"]

@@ -194,7 +207,20 @@ def test_main_entry_point_sum_data_update(snake_oil_facade, monkeypatch):
 def test_main_entry_point_shielded_data(monkeypatch):
     ert = LibresFacade.from_config_file("snake_oil.ert")
     cos_config = {
-        "CALCULATE_KEYS": {"keys": [{"key": "FOPR", "index": [1, 2, 3, 4, 5]}]}
+        "CALCULATE_KEYS": {
+            "keys": [
+                {
+                    "key": "FOPR",
+                    "index": [
+                        datetime(2010, 1, 20),
+                        datetime(2010, 1, 30),
+                        datetime(2010, 2, 9),
+                        datetime(2010, 2, 19),
+                        datetime(2010, 3, 1),
+                    ],
+                }
+            ]
+        }
     }

     obs = ert.get_observations()
diff --git a/tests/jobs/correlated_observations_scaling/unit/test_config.py b/tests/jobs/correlated_observations_scaling/unit/test_config.py
index 8f219ad7..37d827a5 100644
--- a/tests/jobs/correlated_observations_scaling/unit/test_config.py
+++ b/tests/jobs/correlated_observations_scaling/unit/test_config.py
@@ -122,7 +122,7 @@ def test_valid_config_setup(valid_config):
         },
         [
             (
-                "'Will go through the input and try to convert to list of int' "
+                "'Will go through the input and try to convert to list' "
                 "failed on input '[-1, 2, 3]' with error 'Elements can not be "
                 "negative, neither singletons nor in range, got: -1'"
             ),
diff --git a/tests/jobs/test_scale_observations/test_scale_observations.py b/tests/jobs/test_scale_observations/test_scale_observations.py
index fe6ed897..ba5ef094 100644
--- a/tests/jobs/test_scale_observations/test_scale_observations.py
+++ b/tests/jobs/test_scale_observations/test_scale_observations.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
 import pytest

 from semeio.workflows.correlated_observations_scaling.update_scaling import (
@@ -16,25 +18,28 @@ def fixture_snake_oil_obs(snake_oil_facade):
     return snake_oil_facade.get_observations()


-@pytest.mark.parametrize("index_list", [None, [0, 1, 2, 3]])
+@pytest.mark.parametrize(
+    "index_list",
+    [None, [datetime(2010, 1, 10), datetime(2010, 1, 30), datetime(2010, 2, 9, 0, 0)]],
+)
 def test_scale_history_summary_obs(snake_oil_obs, index_list):
     scale_observations(snake_oil_obs, 1.2345, [Config("FOPR", index_list)])

     obs_vector = snake_oil_obs["FOPR"]
-    for index, node in enumerate(obs_vector):
-        if not index_list or index in index_list:
-            assert node.std_scaling == 1.2345, f"index: {index}"
+    for date, node in obs_vector.observations.items():
+        if not index_list or date in index_list:
+            assert node.std_scaling == 1.2345, f"index: {date}"
         else:
-            assert node.std_scaling == 1.0, f"index: {index}"
+            assert node.std_scaling == 1.0, f"index: {date}"


-@pytest.mark.parametrize("index_list", [None, [35]])
+@pytest.mark.parametrize("index_list", [None, [datetime(2010, 12, 26)]])
 def test_scale_summary_obs(snake_oil_obs, index_list):
     scale_observations(snake_oil_obs, 1.2345, [Config("WOPR_OP1_36", index_list)])

     obs_vector = snake_oil_obs["WOPR_OP1_36"]
-    node = obs_vector.observations[36]
-    assert node.std_scaling == 1.2345, f"index: {36}"
+    node = obs_vector.observations[datetime(2010, 12, 26)]
+    assert node.std_scaling == 1.2345, f"index: {datetime(2010, 12, 26)}"


 @pytest.mark.parametrize("index_list", [None, [400, 800]])
diff --git a/tests/legacy_test_data/poly_normal/poly-ies.ert b/tests/legacy_test_data/poly_normal/poly-ies.ert
index 2b187d22..d53360af 100644
--- a/tests/legacy_test_data/poly_normal/poly-ies.ert
+++ b/tests/legacy_test_data/poly_normal/poly-ies.ert
@@ -12,7 +12,7 @@ NUM_REALIZATIONS 100
 MIN_REALIZATIONS 1

 GEN_KW COEFFS coeff.tmpl coeffs.json coeff_priors
-GEN_DATA POLY_RES RESULT_FILE:poly_%d.out REPORT_STEPS:0 INPUT_FORMAT:ASCII
+GEN_DATA POLY_RES RESULT_FILE:poly.out

 INSTALL_JOB poly_eval POLY_EVAL
 SIMULATION_JOB poly_eval