Commit

Merge branch 'test_dss'
christian34 committed Sep 13, 2023
2 parents a5121cc + ac1e54c commit 229bb30
Showing 3 changed files with 100 additions and 49 deletions.
34 changes: 17 additions & 17 deletions src/agroservices/ipm/data/model_input_psilarobse.json
@@ -2,26 +2,26 @@
"modelId": "PSILAROBSE",
"configParameters": {
"timeZone": "Europe/Oslo",
"startDateCalculation": "2020-05-01",
"endDateCalculation": "2020-05-10",
"startDateCalculation": "2023-05-01",
"endDateCalculation": "2023-06-05",
"fieldObservations": [
{
"location": {
"type": "Point",
"coordinates": [
"11.025635",
"59.715791"
]
"fieldObservation": {
"location": {
"type": "Point",
"coordinates": [
10.781989,
59.660468
]
},
"time": "2023-05-28T18:00:00+02:00",
"pestEPPOCode": "PSILRO",
"cropEPPOCode": "DAUCS"
},
"time": "2020-05-05T12:00:00Z",
"pestEPPOCode": "SEPTAP",
"cropEPPOCode": "APUGD"
}
],
"fieldObservationQuantifications": [
{
"trapCountCropEdge": 22,
"trapCountCropInside": 2
"quantification": {
"trapCountCropEdge": 2,
"trapCountCropInside": 55
}
}
]
}
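For reference, the updated fixture is one nested payload; rewritten as a Python dict (illustrative only, values copied from the fixture above) it reads:

```python
# PSILAROBSE test input after this commit: each field observation is paired
# with its quantification inside a single entry, replacing the former parallel
# 'fieldObservationQuantifications' array.
payload = {
    "modelId": "PSILAROBSE",
    "configParameters": {
        "timeZone": "Europe/Oslo",
        "startDateCalculation": "2023-05-01",
        "endDateCalculation": "2023-06-05",
        "fieldObservations": [
            {
                "fieldObservation": {
                    "location": {"type": "Point", "coordinates": [10.781989, 59.660468]},
                    "time": "2023-05-28T18:00:00+02:00",
                    "pestEPPOCode": "PSILRO",
                    "cropEPPOCode": "DAUCS",
                },
                "quantification": {"trapCountCropEdge": 2, "trapCountCropInside": 55},
            }
        ],
    },
}
```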
55 changes: 29 additions & 26 deletions src/agroservices/ipm/fakers.py
@@ -8,8 +8,7 @@
from jsf import JSF
from agroservices.ipm.datadir import country_mapping


Geojson_point ="""{{
Geojson_point = """{{
"type": "FeatureCollection",
"features": [
{{
@@ -26,6 +25,7 @@
]
}}"""


def weather_adapter_params(weather_adapter,
parameters=None,
time_start=None,
@@ -109,7 +109,8 @@ def weather_adapter_params(weather_adapter,
return fake


def weather_data(parameters=(1001, 1002), time_start=None, time_end=None, interval=3600, length=3, longitude =None, latitude=None, altitude=None, data=None):
def weather_data(parameters=(1001, 1002), time_start=None, time_end=None, interval=3600, length=3, longitude=None,
latitude=None, altitude=None, data=None):
"""generate a dict complying IPMDecision weatherData schema"""
fake = {}

@@ -211,7 +212,7 @@ def model_weather_data(model, time_start=None, time_end=None, length=3):
start, end = default_weather_period(model)
if time_start is None:
if time_end is not None:
start = None # let use length
start = None # let use length
else:
start = time_start
if time_end is None:
@@ -239,10 +240,13 @@ def model_field_observations(model, quantifications, latitude=None, longitude=No
pest = random.sample(model['pests'], 1)[0]
if crop is None:
crop = random.sample(model['crops'], 1)[0]
field_obs = [{'location': location,
'time': time[i],
'pestEPPOCode': pest,
'cropEPPOCode': crop} for i in range(length)]
field_obs = [{'fieldObservation': {'location': location,
'time': time[i],
'pestEPPOCode': pest,
'cropEPPOCode': crop
},
'quantification': quantifications[i]
} for i in range(length)]
return field_obs


@@ -265,13 +269,14 @@ def set_default(fake_value, schema):

def set_all_required(schema):
schema['required'] = list(schema['properties'].keys())
for k,v in schema['properties'].items():
for k, v in schema['properties'].items():
if v['type'] == 'object':
schema['properties'][k] = set_all_required(v)
return schema


def input_data(model, weather_data=None, field_observations=None, quantifications = None, requires_all=True, check_default=True):
def input_data(model, weather_data=None, field_observations=None, requires_all=True,
check_default=True):
if model['execution']['type'] == 'LINK':
return None
else:
@@ -302,13 +307,13 @@ def input_data(model, weather_data=None, field_observations=None, quantification
fakeloc.append(prop)
break
if fieldobs:
fieldloc['properties']['fieldObservations'] = {'type': 'string',
'pattern': '^FIELD_OBSERVATION$'}
fieldloc['properties']['fieldObservationQuantifications']['minItems'] = 1
for w in ('fieldObservations', 'fieldObservationQuantifications'):
if w not in fieldloc['required']:
fieldloc['required'].append(w)

fieldloc['properties']['fieldObservations']['minItems'] = 1
if 'fieldObservations' not in fieldloc['required']:
fieldloc['required'].append('fieldObservations')
fieldloc['properties']['fieldObservations']['items']['properties']['fieldObservation'] = {
'type': 'string',
'pattern': '^FIELD_OBSERVATION$'}
fieldloc['properties']['fieldObservations']['items']['required'] = ['fieldObservation', 'quantification']

if requires_all:
if input_schema['type'] == 'object':
@@ -318,7 +323,7 @@
fake = jsf_faker.generate()

if check_default:
for k,v in fake.items():
for k, v in fake.items():
fake[k] = set_default(v, input_schema['properties'][k])

if weather:
@@ -332,23 +337,21 @@
fields = bound['value'].split('.')
for field in fields[:-1]:
d = d[field]
assert fields[-1] in d, 'weather_data_period_' + w + ' not found in input_schema properties, but refered in model input to be there (use FIXED _DATE instead)'
assert fields[
-1] in d, 'weather_data_period_' + w + ' not found in input_schema properties, but refered in model input to be there (use FIXED _DATE instead)'
d[fields[-1]] = weather_data['time' + w[0].upper() + w[1:]]
if model['input']['weather_parameters'][0]['interval'] > 3600:
d[fields[-1]] = d[fields[-1]][:10] #datetime -> date
d[fields[-1]] = d[fields[-1]][:10] # datetime -> date
break
if fieldobs:
d = fake
for prop in fakeloc:
d = d[prop]
if quantifications is not None:
d['fieldObservationQuantifications'] = quantifications
if field_observations is None:
field_observations = model_field_observations(model, d['fieldObservationQuantifications'])
quantifications = [item['quantification'] for item in d['fieldObservations']]
field_observations = model_field_observations(model, quantifications)
d['fieldObservations'] = field_observations

return fake


#TODO: add interpreters for model meta for wralea

# TODO: add interpreters for model meta for wralea
60 changes: 54 additions & 6 deletions src/agroservices/ipm/fixes.py
@@ -6,11 +6,13 @@ def fix_get_weatherdatasource(resp):
resp['ie.gov.data']['endpoint'] = '{WEATHER_API_URL}/rest/weatheradapter/meteireann/'
return resp


def fix_load_model(dss, model):
if dss == 'adas.datamanipulation':
if model['id'] == 'LeafWetnessDuration_RH':
# Use json schema (and not string replace) to declare inputs, similarly to all other IPM endpoints
model['execution']['input_schema'] = {'type': 'object', 'required': ['RH'], 'properties': {'RH': {'type': 'number', 'minimum': 0, 'maximum': 100}}}
model['execution']['input_schema'] = {'type': 'object', 'required': ['RH'], 'properties': {
'RH': {'type': 'number', 'minimum': 0, 'maximum': 100}}}
model['execution']['endpoint'] = model['execution']['endpoint'][:-8]
if dss == 'adas.dss':
if model['id'] == 'MELIAE':
@@ -21,34 +23,80 @@ def fix_load_model(dss, model):
model['execution']['input_schema']['properties']['growthStage']['maximum'] = 59
if model['id'] == 'DEROAG_Cereals':
# observationClass requires at least one item
model['execution']['input_schema']['properties']['configParameters']['properties']['observationClass']['minItems'] = 1
model['execution']['input_schema']['properties']['configParameters']['properties']['observationClass'][
'minItems'] = 1
if dss == 'no.nibio.vips':
if model['id'] == 'PSILAROBSE':
# field obs object misses type
model['execution']['input_schema']['definitions']['fieldObs_PSILRO']['type']='object'
model['execution']['input_schema']['definitions']['fieldObs_PSILRO']['type'] = 'object'
if model['id'] == 'DELIARFOBS':
# field obs object misses type
model['execution']['input_schema']['definitions']['fieldObs_HYLERA']['type'] = 'object'
model['execution']['input_schema']['definitions']['fieldObs_HYLERA']['required'] = list(
model['execution']['input_schema']['definitions']['fieldObs_HYLERA']['properties'].keys())
# old style field obs still declared in schema
props = model['execution']['input_schema']['properties']['configParameters']['properties']
quantifications = props.pop('fieldObservationQuantifications')
props['fieldObservations'] = {"title": "Field observations",
"type": "array",
"items": {
"type": "object",
"title": "Field observation",
"properties": {
"fieldObservation": {
"title": "Generic field observation information",
"$ref": "https://platform.ipmdecisions.net/api/dss/rest/schema/fieldobservation"
},
"quantification":
quantifications[
'items']['oneOf'][
0]
}
}
}
if model['id'] == 'SEPAPIICOL':
# field obs object misses type
model['execution']['input_schema']['definitions']['fieldObs_SEPTAP']['type'] = 'object'
model['execution']['input_schema']['definitions']['fieldObs_SEPTAP']['required'] = list(model['execution']['input_schema']['definitions']['fieldObs_SEPTAP']['properties'].keys())
model['execution']['input_schema']['definitions']['fieldObs_SEPTAP']['required'] = list(
model['execution']['input_schema']['definitions']['fieldObs_SEPTAP']['properties'].keys())
# old style field obs still declared in schema
props = model['execution']['input_schema']['properties']['configParameters']['properties']
quantifications = props.pop('fieldObservationQuantifications')
props['fieldObservations'] = {"title": "Field observations",
"type": "array",
"items": {
"type": "object",
"title": "Field observation",
"properties": {
"fieldObservation": {
"title": "Generic field observation information",
"$ref": "https://platform.ipmdecisions.net/api/dss/rest/schema/fieldobservation"
},
"quantification":
quantifications[
'items']['oneOf'][
0]
}
}
}
if model['id'] == 'BREMIALACT':
# start/end period are not in input_schema_properties
for w in ('start', 'end'):
model['input']['weather_data_period_' + w][0]['determined_by'] = 'FIXED_DATE'
# end point is wrong, bug has been reported, to be check in newer version
model['execution']['endpoint'] = 'https://coremanager.vips.nibio.no/models/BREMIALACT/run/ipmd'
elif dss == 'dk.seges':
# 'weatherData' is misspelled
if 'WeatherData' in model['execution']['input_schema']['properties']:
model['execution']['input_schema']['properties']['weatherData'] = model['execution']['input_schema']['properties'].pop('WeatherData')
model['execution']['input_schema']['properties']['weatherData'] = model['execution']['input_schema'][
'properties'].pop('WeatherData')
# add boundaries for GrowthStages
if 'GrowthStage' in model['execution']['input_schema']['properties']:
model['execution']['input_schema']['properties']['GrowthStage']['minimum'] = 0
model['execution']['input_schema']['properties']['GrowthStage']['maximum'] = 999
return model


def fix_prior_load_model(dss, model):
if dss == 'adas.datamanipulation':
if model['id'] == 'CIBSEsingleday':
@@ -79,4 +127,4 @@ def fix_prior_load_model(dss, model):
items[4] = items[4].replace('TemperatureClasses', 'Relative humidity')
items[8] = items[8][:-1]
model['execution']['input_schema'] = '\n'.join(items)
return model
return model
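Taken together, the schema fixes and the faker stay in step: fix_load_model rewrites the old parallel-array field-observation schema into the nested form, and input_data generates matching fake input. A hypothetical end-to-end sketch (the raw model dict is assumed to come from the IPM DSS catalogue, which is outside this diff):

```python
from agroservices.ipm.fixes import fix_load_model
from agroservices.ipm.fakers import input_data

# raw_model: model description as fetched from the DSS catalogue (assumed, not shown here)
model = fix_load_model("no.nibio.vips", raw_model)  # patch schema quirks, e.g. nested field observations
fake_input = input_data(model)                      # None for models whose execution type is 'LINK'
```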
