Skip to content

Commit

Permalink
Merge pull request #113 from ACME-Climate/save_metrics
Browse files Browse the repository at this point in the history
Save metrics
  • Loading branch information
zshaheen authored Dec 5, 2017
2 parents 616644d + c214b9b commit 71d1830
Show file tree
Hide file tree
Showing 4 changed files with 218 additions and 16 deletions.
3 changes: 1 addition & 2 deletions acme_diags/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@

__version__ = 'v1.0.0'
__version__ = 'v1.1.0'
4 changes: 4 additions & 0 deletions acme_diags/acme_diags_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@
from acme_diags.acme_parser import ACMEParser
from acme_diags.acme_viewer import create_viewer
from acme_diags.driver.utils import get_set_name
import json
import collections
import csv


def _get_default_diags(set_num, dataset):
Expand Down Expand Up @@ -131,5 +134,6 @@ def run_diag(parameters):
os.makedirs(pth)

create_viewer(pth, parameters, parameters[0].output_format[0])

else:
print('There was not a single valid diagnostics run, no viewer created')
174 changes: 173 additions & 1 deletion acme_diags/acme_viewer.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,12 @@
import datetime
import shutil
import collections
import copy
import csv
import json
from bs4 import BeautifulSoup
import acme_diags
from cdp.cdp_viewer import OutputViewer
import acme_diags
from acme_diags.driver.utils import get_set_name

# Dict of
Expand All @@ -24,6 +27,8 @@
# needed so we can have a cols in order of ANN, DJF, MAM, JJA, SON
ROW_INFO = collections.OrderedDict()

# A similar dict for creating the lat-lon tables
LAT_LON_TABLE_INFO = collections.OrderedDict()

def _copy_acme_logo(root_dir):
"""Copy over ACME_Logo.png to root_dir/viewer"""
Expand Down Expand Up @@ -127,7 +132,10 @@ def _extras(root_dir, parameters):
parameters[0].test_name, dt, path)
h1_to_h3(f)

_edit_table_html(root_dir)
_add_lat_lon_table_to_viewer_index(root_dir)


def _add_pages_and_top_row(viewer, parameters):
"""Add the page and columns of the page"""
set_to_seasons = collections.OrderedDict() # dict of {set: [seasons]}
Expand Down Expand Up @@ -169,7 +177,163 @@ def _better_page_name(old_name):
return 'CloudTopHeight-Tau joint histograms'
else:
return old_name

def _add_to_lat_lon_metrics_table(metrics_path, season, row_name):
    """Record the metrics for (season, row_name) in LAT_LON_TABLE_INFO.

    Loads the metrics dict from '<metrics_path>.json' and files it under
    LAT_LON_TABLE_INFO[season][row_name]['metrics'], creating the nested
    OrderedDicts on first use.
    """
    with open(metrics_path + '.json') as json_file:
        metrics_dict = json.load(json_file)

    season_info = LAT_LON_TABLE_INFO.setdefault(season, collections.OrderedDict())
    row_info = season_info.setdefault(row_name, collections.OrderedDict())
    row_info['metrics'] = metrics_dict

def _create_csv_from_dict(output_dir, season):
    """Create a csv for a season in LAT_LON_TABLE_INFO in output_dir and return the path to it"""
    table_path = os.path.join(output_dir, season + '_metrics_table.csv')

    col_names = ['Variables', 'Unit', 'Model mean', 'Obs mean',
                 'Mean Bias', 'RMSE', 'correlation']

    with open(table_path, 'w') as table_csv:
        writer = csv.writer(table_csv, delimiter=',',
                            lineterminator='\n', quoting=csv.QUOTE_NONE)
        writer.writerow(col_names)

        # One row per variable, in insertion order (LAT_LON_TABLE_INFO is
        # an OrderedDict); every statistic is rounded to 3 decimals.
        for var_name, row_info in LAT_LON_TABLE_INFO[season].items():
            metrics = row_info['metrics']
            test_mean = metrics['test_regrid']['mean']
            ref_mean = metrics['ref_regrid']['mean']
            writer.writerow([
                var_name,
                metrics['unit'],
                round(test_mean, 3),
                round(ref_mean, 3),
                round(test_mean - ref_mean, 3),
                round(metrics['misc']['rmse'], 3),
                round(metrics['misc']['corr'], 3),
            ])

    return table_path

def _cvs_to_html(csv_path, season):
"""Convert the csv for a season located at csv_path to an HTML, returning the path to the HTML"""
html_path = csv_path.replace('csv', 'html')

with open(html_path, 'w') as htmlfile:
htmlfile.write('<p><th><b>{} Mean </b></th></p>'.format(season))
htmlfile.write('<table>')

with open(csv_path) as csv_file:
read_csv = csv.reader(csv_file)

# generate table contents
for num, row in enumerate(read_csv):

# write the header row, assuming the first row in csv contains the header
if num == 0:
htmlfile.write('<tr>')
for column in row:
htmlfile.write('<th>{}</th>'.format(column))
htmlfile.write('</tr>')

# write all other rows
else:
htmlfile.write('<tr><div style="width: 50px">')
for column in row:
htmlfile.write('<td>{}</td>'.format(column))
htmlfile.write('</div></tr>')

htmlfile.write('</table>')

return html_path


def _add_html_to_col(season, season_path, html_path):
    """Since the output viewer doesn't support html images, do this hack.
    For the col in the html at html_path, insert the link to col_path.

    In every <tr class="output-row"> of the page at html_path, the cell
    corresponding to *season* has its text replaced by an <a> pointing at
    season_path (made relative with a '../../..' prefix).
    """
    soup = BeautifulSoup(open(html_path), "lxml")

    # The cells in each output row follow this fixed column order, so the
    # season alone determines which cell to rewrite.
    col_index = ['All variables', 'ANN', 'DJF', 'MAM', 'JJA', 'SON'].index(season)
    link_url = os.path.join('..', '..', '..', season_path)

    for output_row in soup.find_all("tr", {"class": "output-row"}):
        cell = output_row.find_all("td")[col_index]

        link = soup.new_tag("a", href=link_url)
        link.append(season)

        # Drop the placeholder text before inserting the link.
        cell.string = ''
        cell.append(link)

    with open(html_path, "wb") as out:
        out.write(soup.prettify("utf-8"))

def _edit_table_html(root_dir):
    """After the viewer is created, edit the table html to insert the custom htmls"""
    table_index = os.path.join(root_dir, 'table', 'index.html')
    for season in ('ANN', 'DJF', 'MAM', 'JJA', 'SON'):
        season_info = LAT_LON_TABLE_INFO.get(season)
        if season_info is not None:
            _add_html_to_col(season, season_info['html_path'], table_index)

def _create_lat_lon_table_index(viewer, root_dir):
    """Create an index in the viewer that links the individual htmls for the lat-lon table."""
    seasons = ['ANN', 'DJF', 'MAM', 'JJA', 'SON']
    viewer.add_page('Table', seasons)
    viewer.add_group('Summary Table')
    viewer.add_row('All variables')

    for season in seasons:
        if season not in LAT_LON_TABLE_INFO:
            # No metrics were collected for this season; add a placeholder.
            viewer.add_col('-----', is_file=True, title='-----')
        else:
            viewer.add_col(LAT_LON_TABLE_INFO[season]['html_path'],
                           is_file=False, title=season)

def _add_lat_lon_table_to_viewer_index(root_dir):
    """Move the link to Table next to the link to Latitude-Longitude contour maps"""
    index_page = os.path.join(root_dir, 'index.html')
    soup = BeautifulSoup(open(index_page), "lxml")

    # append the new tag underneath the old one, so add it to the parent of the old one
    td_to_move = None
    # Locate the cell whose link targets the table page, keep a deep copy of
    # it, and remove that entire row from the index.
    for tr in soup.find_all("tr"):
        for td in tr.find_all("td"):
            for a in td.find_all("a"):
                # NOTE(review): matches any href containing the substring
                # 'table'; assumes no other index link has 'table' in its
                # path — verify against the generated index.html.
                if 'table' in a['href']:
                    td_to_move = copy.deepcopy(td)
                    tr.decompose()

    if td_to_move:
        # Re-attach the copied cell inside the row that links to the
        # 'Latitude-Longitude contour maps' page.
        for tr in soup.find_all("tr"):
            for td in tr.find_all("td"):
                for a in td.find_all("a"):
                    # NOTE(review): a.string is None when a link contains
                    # nested markup, which would make the 'in' test raise
                    # TypeError — confirm the index only has plain-text links.
                    if _better_page_name('lat_lon') in a.string:
                        td.append(td_to_move)

    html = soup.prettify("utf-8")
    with open(index_page, "wb") as f:
        f.write(html)

def generate_lat_lon_metrics_table(viewer, root_dir):
    """For each season in LAT_LON_TABLE_INFO, create a csv, convert it to an html and append that html to the viewer."""
    # output_dir/viewer/table-data
    table_dir = os.path.join(root_dir, 'table-data')
    if not os.path.exists(table_dir):
        os.mkdir(table_dir)

    # Build one csv + html per collected season and remember the html path
    # so the index and table pages can link to it.
    for season, season_info in LAT_LON_TABLE_INFO.items():
        season_csv = _create_csv_from_dict(table_dir, season)
        season_info['html_path'] = _cvs_to_html(season_csv, season)

    _create_lat_lon_table_index(viewer, root_dir)

def create_viewer(root_dir, parameters, ext):
"""Based of the parameters, find the files with
Expand Down Expand Up @@ -207,6 +371,13 @@ def create_viewer(root_dir, parameters, ext):
ref_name, var, int(plev), season, region)
row_name_and_fnm.append((row_name, fnm))

if set_num in ['lat_lon', '5']:
metrics_path = os.path.join(parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm)
if os.path.exists(metrics_path + '.json'):
_add_to_lat_lon_metrics_table(metrics_path, season, row_name)
else:
print('JSON does not exist: {}'.format(metrics_path + '.json'))
continue
for row_name, fnm in row_name_and_fnm:
if parameter.case_id not in ROW_INFO[set_num]:
ROW_INFO[set_num][parameter.case_id] = collections.OrderedDict(
Expand Down Expand Up @@ -255,5 +426,6 @@ def create_viewer(root_dir, parameters, ext):
viewer.add_col(fnm + '.' + ext, is_file=True,
title=col_season, other_files=formatted_files)

generate_lat_lon_metrics_table(viewer, root_dir)
viewer.generate_viewer(prompt_user=False)
_extras(root_dir, parameters)
53 changes: 40 additions & 13 deletions acme_diags/driver/lat_lon_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,34 +6,44 @@
from acme_diags.derivations import acme
from acme_diags.metrics import rmse, corr, min_cdms, max_cdms, mean
from acme_diags.driver import utils
from acme_diags.driver.utils import get_output_dir
import os
import sys
import json


def create_metrics(ref, test, ref_regrid, test_regrid, diff):
    """Creates the mean, max, min, rmse, corr in a dictionary.

    Every statistic is cast to a plain ``float`` so the resulting dict is
    JSON-serializable (the run_diag caller dumps it with ``json.dump``;
    the raw metric results cannot be encoded by the json module).

    Parameters
    ----------
    ref, test
        The reference and test variables on their native grids.
    ref_regrid, test_regrid
        The same variables regridded to the common grid.
    diff
        The difference of the regridded variables.

    Returns
    -------
    dict
        Keys 'ref', 'ref_regrid', 'test', 'test_regrid' and 'diff', each a
        dict of 'min'/'max'/'mean', plus 'misc' holding 'rmse' and 'corr'
        computed on the regridded fields.
    """
    def _stats(var):
        # min/max/mean of one variable, as JSON-safe floats.
        return {
            'min': float(min_cdms(var)),
            'max': float(max_cdms(var)),
            'mean': float(mean(var)),
        }

    metrics_dict = {
        'ref': _stats(ref),
        'ref_regrid': _stats(ref_regrid),
        'test': _stats(test),
        'test_regrid': _stats(test_regrid),
        'diff': _stats(diff),
        'misc': {
            'rmse': float(rmse(test_regrid, ref_regrid)),
            'corr': float(corr(test_regrid, ref_regrid)),
        },
    }
    return metrics_dict


Expand Down Expand Up @@ -184,6 +194,14 @@ def run_diag(parameter):
metrics_dict = create_metrics(
mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff)

metrics_dict['unit'] = mv1_reg.units

fnm = os.path.join(get_output_dir(
parameter.current_set, parameter), parameter.output_file)
with open(fnm + '.json' , 'w') as outfile:
json.dump(metrics_dict,outfile)
print('Metrics saved in: ' + fnm + '.json')

parameter.var_region = region
plot(parameter.current_set, mv2_domain,
mv1_domain, diff, metrics_dict, parameter)
Expand Down Expand Up @@ -232,6 +250,15 @@ def run_diag(parameter):
diff = mv1_reg - mv2_reg
metrics_dict = create_metrics(
mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff)

metrics_dict['unit'] = mv1_reg.units

fnm = os.path.join(get_output_dir(
parameter.current_set, parameter), parameter.output_file)
with open(fnm + '.json' , 'w') as outfile:
json.dump(metrics_dict,outfile)
print('Metrics saved in: ' + fnm + '.json')

parameter.var_region = region
plot(parameter.current_set, mv2_domain,
mv1_domain, diff, metrics_dict, parameter)
Expand Down

0 comments on commit 71d1830

Please sign in to comment.