From 96284c741b2e0f2547de96acbff5e77274c84729 Mon Sep 17 00:00:00 2001 From: chengzhuzhang Date: Thu, 2 Nov 2017 16:38:24 -0700 Subject: [PATCH 01/11] saved metrics as json --- acme_diags/driver/lat_lon_driver.py | 49 +++++++++++++++++++++-------- 1 file changed, 36 insertions(+), 13 deletions(-) diff --git a/acme_diags/driver/lat_lon_driver.py b/acme_diags/driver/lat_lon_driver.py index 513bc4797..fe5452209 100755 --- a/acme_diags/driver/lat_lon_driver.py +++ b/acme_diags/driver/lat_lon_driver.py @@ -6,34 +6,44 @@ from acme_diags.derivations import acme from acme_diags.metrics import rmse, corr, min_cdms, max_cdms, mean from acme_diags.driver import utils +from acme_diags.driver.utils import get_output_dir import os import sys +import json def create_metrics(ref, test, ref_regrid, test_regrid, diff): """Creates the mean, max, min, rmse, corr in a dictionary""" metrics_dict = {} metrics_dict['ref'] = { - 'min': min_cdms(ref), - 'max': max_cdms(ref), - 'mean': mean(ref) + 'min': float(min_cdms(ref)), + 'max': float(max_cdms(ref)), + 'mean': float(mean(ref)) + } + metrics_dict['ref_regrid'] = { + 'min': float(min_cdms(ref_regrid)), + 'max': float(max_cdms(ref_regrid)), + 'mean': float(mean(ref_regrid)) } metrics_dict['test'] = { - 'min': min_cdms(test), - 'max': max_cdms(test), - 'mean': mean(test) + 'min': float(min_cdms(test)), + 'max': float(max_cdms(test)), + 'mean': float(mean(test)) + } + metrics_dict['test_regrid'] = { + 'min': float(min_cdms(test_regrid)), + 'max': float(max_cdms(test_regrid)), + 'mean': float(mean(test_regrid)) } - metrics_dict['diff'] = { - 'min': min_cdms(diff), - 'max': max_cdms(diff), - 'mean': mean(diff) + 'min': float(min_cdms(diff)), + 'max': float(max_cdms(diff)), + 'mean': float(mean(diff)) } metrics_dict['misc'] = { - 'rmse': rmse(test_regrid, ref_regrid), - 'corr': corr(test_regrid, ref_regrid) + 'rmse': float(rmse(test_regrid, ref_regrid)), + 'corr': float(corr(test_regrid, ref_regrid)) } - return metrics_dict @@ -171,6 +181,12 @@ def run_diag(parameter): metrics_dict = create_metrics( mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff) + fnm = os.path.join(get_output_dir( + parameter.current_set, parameter), parameter.output_file) + with open(fnm + '.json' , 'w') as outfile: + json.dump(metrics_dict,outfile) + print('Metrics saved in: ' + fnm + '.json') + parameter.var_region = region plot(parameter.current_set, mv2_domain, mv1_domain, diff, metrics_dict, parameter) @@ -219,6 +235,13 @@ def run_diag(parameter): diff = mv1_reg - mv2_reg metrics_dict = create_metrics( mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff) + + fnm = os.path.join(get_output_dir( + parameter.current_set, parameter), parameter.output_file) + with open(fnm + '.json' , 'w') as outfile: + json.dump(metrics_dict,outfile) + print('Metrics saved in: ' + fnm + '.json') + parameter.var_region = region plot(parameter.current_set, mv2_domain, mv1_domain, diff, metrics_dict, parameter) From d814e5e8501546c66ab52b67eae3963608e44035 Mon Sep 17 00:00:00 2001 From: chengzhuzhang Date: Mon, 6 Nov 2017 13:37:34 -0800 Subject: [PATCH 02/11] save metrics in csv format --- acme_diags/acme_diags_driver.py | 71 +++++++++++++++++++++++++++++++++ acme_diags/acme_viewer.py | 1 + 2 files changed, 72 insertions(+) diff --git a/acme_diags/acme_diags_driver.py b/acme_diags/acme_diags_driver.py index 7030a069b..faf6d65d1 100644 --- a/acme_diags/acme_diags_driver.py +++ b/acme_diags/acme_diags_driver.py @@ -11,6 +11,75 @@ from acme_diags.acme_parser import ACMEParser from acme_diags.acme_viewer import create_viewer 
from acme_diags.driver.utils import get_set_name +import json +import collections +import csv + + +def create_lat_lon_metrics_table(root_dir, parameters): + + metrics_info = collections.OrderedDict() + for parameter in parameters: + for set_num in parameter.sets: + set_num = get_set_name(set_num) + print(set_num) + if set_num in ['lat_lon','5']: + + # ref_name-variable-season-region + # or + # ref_name-variable-plev'mb'-season-region + ref_name = parameter.ref_name + + ## Save data in .cvs as a table + #header = ['Variables','Model mean','Obs mean','Mean Bias','RMSE','correlation'] + #table_data = np.empty([len(parameter.variables), 6]) + + for var in parameter.variables: + for season in parameter.seasons: + for region in parameter.regions: + # because some parameters have plevs, there might be + # more than one row_name, fnm pair + row_name_and_fnm = [] + + if parameter.plevs == []: # 2d variables + row_name = '{} {} {}'.format(var, region, ref_name) + fnm = '{}-{}-{}-{}'.format(ref_name, + var, season, region) + row_name_and_fnm.append((row_name, fnm)) + else: # 3d variables + for plev in parameter.plevs: + row_name = '{} {} {} {}'.format( + var, str(int(plev)) + ' mb', region, ref_name) + fnm = '{}-{}-{}-{}-{}'.format( + ref_name, var, int(plev), season, region) + row_name_and_fnm.append((row_name, fnm)) + print(row_name_and_fnm) + metrics_path = os.path.join( + #'..', '{}'.format(set_num), parameter.case_id, fnm) + parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm) + metrics_dic = json.load(open(metrics_path + '.json')) + + if season not in metrics_info: + metrics_info[season] = collections.OrderedDict() + if row_name not in metrics_info[season]: + metrics_info[season][row_name] = collections.OrderedDict() + metrics_info[season][row_name]['metrics'] = metrics_dic + + # save metrics information in .csv table + header = ['Variables','Model mean','Obs mean','Mean Bias','RMSE','correlation'] + for season in parameter.seasons: + table_path = os.path.abspath(os.path.join( + #'..', '{}'.format(set_num), parameter.case_id, fnm) + parameter.results_dir, '{}'.format(set_num))) + print(table_path) + with open(table_path + '/' + season + '_metrics_table.csv','w') as f1: + writer=csv.writer(f1, delimiter=',',lineterminator='\n', quoting=csv.QUOTE_NONE) + writer.writerow(header) + for key, metrics_dic in metrics_info[season].items(): + metrics = metrics_dic['metrics'] + row = [key, round(metrics['test_regrid']['mean'],3),round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] + writer.writerow(row) + def _get_default_diags(set_num, dataset): @@ -130,6 +199,8 @@ def run_diag(parameters): if not os.path.exists(pth): os.makedirs(pth) + create_lat_lon_metrics_table(pth, parameters) + create_viewer(pth, parameters, parameters[0].output_format[0]) else: print('There was not a single valid diagnostics run, no viewer created') diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index 426ae5ca6..77779bd1a 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -207,6 +207,7 @@ def create_viewer(root_dir, parameters, ext): ref_name, var, int(plev), season, region) row_name_and_fnm.append((row_name, fnm)) + print(ROW_INFO) for row_name, fnm in row_name_and_fnm: if parameter.case_id not in ROW_INFO[set_num]: ROW_INFO[set_num][parameter.case_id] = collections.OrderedDict( From 4e6f843c3d59fb65b840ecdf8b441dc874586f82 Mon Sep 17 00:00:00 
2001 From: chengzhuzhang Date: Mon, 6 Nov 2017 14:10:25 -0800 Subject: [PATCH 03/11] add try catch for non-available data json --- acme_diags/acme_diags_driver.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/acme_diags/acme_diags_driver.py b/acme_diags/acme_diags_driver.py index faf6d65d1..af87b6b04 100644 --- a/acme_diags/acme_diags_driver.py +++ b/acme_diags/acme_diags_driver.py @@ -29,10 +29,6 @@ def create_lat_lon_metrics_table(root_dir, parameters): # or # ref_name-variable-plev'mb'-season-region ref_name = parameter.ref_name - - ## Save data in .cvs as a table - #header = ['Variables','Model mean','Obs mean','Mean Bias','RMSE','correlation'] - #table_data = np.empty([len(parameter.variables), 6]) for var in parameter.variables: for season in parameter.seasons: @@ -57,13 +53,19 @@ def create_lat_lon_metrics_table(root_dir, parameters): metrics_path = os.path.join( #'..', '{}'.format(set_num), parameter.case_id, fnm) parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm) - metrics_dic = json.load(open(metrics_path + '.json')) + try: + metrics_dic = json.load(open(metrics_path + '.json')) + except Exception as e: + print(e) + continue if season not in metrics_info: metrics_info[season] = collections.OrderedDict() if row_name not in metrics_info[season]: metrics_info[season][row_name] = collections.OrderedDict() metrics_info[season][row_name]['metrics'] = metrics_dic + + # save metrics information in .csv table header = ['Variables','Model mean','Obs mean','Mean Bias','RMSE','correlation'] From dbe58659c1e7d878885a495cb2cb4103f29b633b Mon Sep 17 00:00:00 2001 From: chengzhuzhang Date: Mon, 6 Nov 2017 14:33:00 -0800 Subject: [PATCH 04/11] delete print line from viewer --- acme_diags/acme_viewer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index 77779bd1a..426ae5ca6 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -207,7 +207,6 @@ def create_viewer(root_dir, parameters, ext): ref_name, var, int(plev), season, region) row_name_and_fnm.append((row_name, fnm)) - print(ROW_INFO) for row_name, fnm in row_name_and_fnm: if parameter.case_id not in ROW_INFO[set_num]: ROW_INFO[set_num][parameter.case_id] = collections.OrderedDict( From 8eff67818c3bf35b88369a51c7e1fdb51f58ebd3 Mon Sep 17 00:00:00 2001 From: chengzhuzhang Date: Wed, 8 Nov 2017 11:25:32 -0800 Subject: [PATCH 05/11] add unit column to table --- acme_diags/acme_diags_driver.py | 4 ++-- acme_diags/driver/lat_lon_driver.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/acme_diags/acme_diags_driver.py b/acme_diags/acme_diags_driver.py index af87b6b04..0a4d6660a 100644 --- a/acme_diags/acme_diags_driver.py +++ b/acme_diags/acme_diags_driver.py @@ -68,7 +68,7 @@ def create_lat_lon_metrics_table(root_dir, parameters): # save metrics information in .csv table - header = ['Variables','Model mean','Obs mean','Mean Bias','RMSE','correlation'] + header = ['Variables','Unit','Model mean','Obs mean','Mean Bias','RMSE','correlation'] for season in parameter.seasons: table_path = os.path.abspath(os.path.join( #'..', '{}'.format(set_num), parameter.case_id, fnm) @@ -79,7 +79,7 @@ def create_lat_lon_metrics_table(root_dir, parameters): writer.writerow(header) for key, metrics_dic in metrics_info[season].items(): metrics = metrics_dic['metrics'] - row = [key, round(metrics['test_regrid']['mean'],3),round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - 
metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] + row = [key, metrics['unit'], round(metrics['test_regrid']['mean'],3),round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] writer.writerow(row) diff --git a/acme_diags/driver/lat_lon_driver.py b/acme_diags/driver/lat_lon_driver.py index fe5452209..4cdac36d0 100755 --- a/acme_diags/driver/lat_lon_driver.py +++ b/acme_diags/driver/lat_lon_driver.py @@ -181,6 +181,8 @@ def run_diag(parameter): metrics_dict = create_metrics( mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff) + metrics_dict['unit'] = mv1_reg.units + fnm = os.path.join(get_output_dir( parameter.current_set, parameter), parameter.output_file) with open(fnm + '.json' , 'w') as outfile: @@ -236,6 +238,8 @@ def run_diag(parameter): metrics_dict = create_metrics( mv2_domain, mv1_domain, mv2_reg, mv1_reg, diff) + metrics_dict['unit'] = mv1_reg.units + fnm = os.path.join(get_output_dir( parameter.current_set, parameter), parameter.output_file) with open(fnm + '.json' , 'w') as outfile: From eff5637fd273c22e3c96af74fb23613737c4c9e8 Mon Sep 17 00:00:00 2001 From: chengzhuzhang Date: Wed, 8 Nov 2017 15:24:10 -0800 Subject: [PATCH 06/11] create htmls from csv --- acme_diags/acme_diags_driver.py | 34 +++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/acme_diags/acme_diags_driver.py b/acme_diags/acme_diags_driver.py index 0a4d6660a..1f380e119 100644 --- a/acme_diags/acme_diags_driver.py +++ b/acme_diags/acme_diags_driver.py @@ -81,9 +81,43 @@ def create_lat_lon_metrics_table(root_dir, parameters): metrics = metrics_dic['metrics'] row = [key, metrics['unit'], round(metrics['test_regrid']['mean'],3),round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] writer.writerow(row) + + # convert csv to html + + read_csv = csv.reader(open(table_path + '/' + season + '_metrics_table.csv')) + htmlfile = open(table_path + '/' + season + '_metrics_table.html','w+') + htmlfile.write('
<p><b>'+ season + ' Mean' + '</b></p>') + # initialize rownum variable + rownum = 0 + # write <table> tag + htmlfile.write('<table>') + # generate table contents + + for row in read_csv: # Read a single row from the CSV file + + # write header row. assumes first row in csv contains header + if rownum == 0: + htmlfile.write('<tr>') # write <tr> tag + for column in row: + htmlfile.write('<th>' + column + '</th>') + htmlfile.write('</tr>') + + # write all other rows + else: + htmlfile.write('<tr>') + #htmlfile.write('') + for column in row: + htmlfile.write('<td>' + column + '</td>') + #htmlfile.write('') + htmlfile.write('</tr>') + #increment row count + rownum += 1 + # write </table>
tag + htmlfile.write('') + def _get_default_diags(set_num, dataset): """Returns the path from the json corresponding to set_num""" set_num = get_set_name(set_num) From 149774ac88242e2a6b4bc7619101c43e21534bab Mon Sep 17 00:00:00 2001 From: zshaheen Date: Mon, 13 Nov 2017 18:24:38 -0800 Subject: [PATCH 07/11] Reformatted lat-lon table to viewer. --- acme_diags/acme_diags_driver.py | 105 +------------------------------- acme_diags/acme_viewer.py | 87 +++++++++++++++++++++++++- 2 files changed, 86 insertions(+), 106 deletions(-) diff --git a/acme_diags/acme_diags_driver.py b/acme_diags/acme_diags_driver.py index 1f380e119..73c3ecf8a 100644 --- a/acme_diags/acme_diags_driver.py +++ b/acme_diags/acme_diags_driver.py @@ -16,108 +16,6 @@ import csv -def create_lat_lon_metrics_table(root_dir, parameters): - - metrics_info = collections.OrderedDict() - for parameter in parameters: - for set_num in parameter.sets: - set_num = get_set_name(set_num) - print(set_num) - if set_num in ['lat_lon','5']: - - # ref_name-variable-season-region - # or - # ref_name-variable-plev'mb'-season-region - ref_name = parameter.ref_name - - for var in parameter.variables: - for season in parameter.seasons: - for region in parameter.regions: - # because some parameters have plevs, there might be - # more than one row_name, fnm pair - row_name_and_fnm = [] - - if parameter.plevs == []: # 2d variables - row_name = '{} {} {}'.format(var, region, ref_name) - fnm = '{}-{}-{}-{}'.format(ref_name, - var, season, region) - row_name_and_fnm.append((row_name, fnm)) - else: # 3d variables - for plev in parameter.plevs: - row_name = '{} {} {} {}'.format( - var, str(int(plev)) + ' mb', region, ref_name) - fnm = '{}-{}-{}-{}-{}'.format( - ref_name, var, int(plev), season, region) - row_name_and_fnm.append((row_name, fnm)) - print(row_name_and_fnm) - metrics_path = os.path.join( - #'..', '{}'.format(set_num), parameter.case_id, fnm) - parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm) - try: - metrics_dic = json.load(open(metrics_path + '.json')) - except Exception as e: - print(e) - continue - - if season not in metrics_info: - metrics_info[season] = collections.OrderedDict() - if row_name not in metrics_info[season]: - metrics_info[season][row_name] = collections.OrderedDict() - metrics_info[season][row_name]['metrics'] = metrics_dic - - - - # save metrics information in .csv table - header = ['Variables','Unit','Model mean','Obs mean','Mean Bias','RMSE','correlation'] - for season in parameter.seasons: - table_path = os.path.abspath(os.path.join( - #'..', '{}'.format(set_num), parameter.case_id, fnm) - parameter.results_dir, '{}'.format(set_num))) - print(table_path) - with open(table_path + '/' + season + '_metrics_table.csv','w') as f1: - writer=csv.writer(f1, delimiter=',',lineterminator='\n', quoting=csv.QUOTE_NONE) - writer.writerow(header) - for key, metrics_dic in metrics_info[season].items(): - metrics = metrics_dic['metrics'] - row = [key, metrics['unit'], round(metrics['test_regrid']['mean'],3),round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] - writer.writerow(row) - - # convert csv to html - - read_csv = csv.reader(open(table_path + '/' + season + '_metrics_table.csv')) - htmlfile = open(table_path + '/' + season + '_metrics_table.html','w+') - htmlfile.write('
<p><b>'+ season + ' Mean' + '</b></p>') - # initialize rownum variable - rownum = 0 - # write <table> tag - htmlfile.write('<table>') - # generate table contents - - for row in read_csv: # Read a single row from the CSV file - - # write header row. assumes first row in csv contains header - if rownum == 0: - htmlfile.write('<tr>') # write <tr> tag - for column in row: - htmlfile.write('<th>' + column + '</th>') - htmlfile.write('</tr>') - - # write all other rows - else: - htmlfile.write('<tr>') - #htmlfile.write('') - for column in row: - htmlfile.write('<td>' + column + '</td>') - #htmlfile.write('') - htmlfile.write('</tr>') - #increment row count - rownum += 1 - # write </table>
tag - htmlfile.write('') - - - - def _get_default_diags(set_num, dataset): """Returns the path from the json corresponding to set_num""" set_num = get_set_name(set_num) @@ -235,8 +133,7 @@ def run_diag(parameters): if not os.path.exists(pth): os.makedirs(pth) - create_lat_lon_metrics_table(pth, parameters) - create_viewer(pth, parameters, parameters[0].output_format[0]) + else: print('There was not a single valid diagnostics run, no viewer created') diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index 426ae5ca6..1270a986e 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -4,9 +4,11 @@ import datetime import shutil import collections +import csv +import json from bs4 import BeautifulSoup -import acme_diags from cdp.cdp_viewer import OutputViewer +import acme_diags from acme_diags.driver.utils import get_set_name # Dict of @@ -24,6 +26,8 @@ # needed so we can have a cols in order of ANN, DJF, MAM, JJA, SON ROW_INFO = collections.OrderedDict() +# A similar dict for creating the lat-lon tables +LAT_LON_TABLE_INFO = collections.OrderedDict() def _copy_acme_logo(root_dir): """Copy over ACME_Logo.png to root_dir/viewer""" @@ -169,7 +173,81 @@ def _better_page_name(old_name): return 'CloudTopHeight-Tau joint histograms' else: return old_name - + +def _add_to_lat_lon_metrics_table(metrics_path, season, row_name): + """Add the metrics for the current season and row_name to the lat-lon table""" + with open(metrics_path + '.json') as json_file: + metrics_dict = json.load(json_file) + + if season not in LAT_LON_TABLE_INFO: + LAT_LON_TABLE_INFO[season] = collections.OrderedDict() + if row_name not in LAT_LON_TABLE_INFO[season]: + LAT_LON_TABLE_INFO[season][row_name] = collections.OrderedDict() + LAT_LON_TABLE_INFO[season][row_name]['metrics'] = metrics_dict + +def _create_csv_from_dict(output_dir, season): + """Create a csv for a season in LAT_LON_TABLE_INFO in output_dir and return the path to it""" + + # output_dir = os.path.join(parameter.results_dir, '{}'.format(set_num)) + + table_path = os.path.abspath(os.path.join(output_dir, season + '_metrics_table.csv')) + + col_names = ['Variables', 'Unit', 'Model mean', 'Obs mean', 'Mean Bias', 'RMSE', 'correlation'] + + with open(table_path, 'w') as table_csv: + writer=csv.writer(table_csv, delimiter=',', lineterminator='\n', quoting=csv.QUOTE_NONE) + writer.writerow(col_names) + for key, metrics_dic in LAT_LON_TABLE_INFO[season].items(): + metrics = metrics_dic['metrics'] + row = [key, metrics['unit'], round(metrics['test_regrid']['mean'],3), round(metrics['ref_regrid']['mean'],3), round(metrics['test_regrid']['mean'] - metrics['ref_regrid']['mean'],3), round(metrics['misc']['rmse'],3), round(metrics['misc']['corr'],3)] + writer.writerow(row) + + return table_path + +def _cvs_to_html(csv_path, season): + """Convert the csv for a season located at csv_path to an HTML, returning the path to the HTML""" + html_path = csv_path.replace('csv', 'html') + + with open(html_path, 'w') as htmlfile: + htmlfile.write('
<p><b>{} Mean</b></p>'.format(season)) + htmlfile.write('<table>') + + with open(csv_path) as csv_file: + read_csv = csv.reader(csv_file) + + # generate table contents + for num, row in enumerate(read_csv): + + # write the header row, assuming the first row in csv contains the header + if num == 0: + htmlfile.write('<tr>') + for column in row: + htmlfile.write('<th>{}</th>'.format(column)) + htmlfile.write('</tr>') + + # write all other rows + else: + htmlfile.write('<tr>') + for column in row: + htmlfile.write('<td>{}</td>'.format(column)) + htmlfile.write('</tr>') + + htmlfile.write('</table>
') + + return html_path + +def _add_lat_lon_table_to_viewer(csv_path): + """Add a link to the lat-lon table to the viewer""" + # TODO: Implement this, but first talk to the others about how they want it to look + pass + +def generate_lat_lon_metrics_table(root_dir): + """For each season in LAT_LON_TABLE_INFO, create a csv, convert it to an html and append that html to the viewer.""" + for season in LAT_LON_TABLE_INFO: + csv_path = _create_csv_from_dict(root_dir, season) + html_path = _cvs_to_html(csv_path, season) + _add_lat_lon_table_to_viewer(html_path) + # print('Path to lat-lon table: {}'.format(html_path)) def create_viewer(root_dir, parameters, ext): """Based of the parameters, find the files with @@ -207,6 +285,10 @@ def create_viewer(root_dir, parameters, ext): ref_name, var, int(plev), season, region) row_name_and_fnm.append((row_name, fnm)) + if set_num in ['lat_lon', '5']: + metrics_path = os.path.join(parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm) + _add_to_lat_lon_metrics_table(metrics_path, season, row_name) + for row_name, fnm in row_name_and_fnm: if parameter.case_id not in ROW_INFO[set_num]: ROW_INFO[set_num][parameter.case_id] = collections.OrderedDict( @@ -256,4 +338,5 @@ def create_viewer(root_dir, parameters, ext): title=col_season, other_files=formatted_files) viewer.generate_viewer(prompt_user=False) + generate_lat_lon_metrics_table(root_dir) _extras(root_dir, parameters) From 7bf7178f207e1442d68c26ccddc33fc7d4d4e834 Mon Sep 17 00:00:00 2001 From: zshaheen Date: Mon, 13 Nov 2017 20:13:48 -0800 Subject: [PATCH 08/11] Everything works, but it's ugly :( --- acme_diags/__init__.py | 3 +- acme_diags/acme_viewer.py | 85 ++++++++++++++++++++++++++++++++++++--- 2 files changed, 80 insertions(+), 8 deletions(-) diff --git a/acme_diags/__init__.py b/acme_diags/__init__.py index e6cb5d6e7..43ef892b2 100644 --- a/acme_diags/__init__.py +++ b/acme_diags/__init__.py @@ -1,2 +1 @@ - -__version__ = 'v1.0.0' +__version__ = 'v1.1.0' diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index 1270a986e..b2727a265 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -236,18 +236,91 @@ def _cvs_to_html(csv_path, season): return html_path -def _add_lat_lon_table_to_viewer(csv_path): +def _create_lat_lon_table_index(table_dir): + """Create an index.html that links the individual htmls for the lat-lon table.""" + html = ''' + + + Latitude Longitude Table + + + + + +
+ + + ''' + soup = BeautifulSoup(html, "lxml") + for t in soup.find_all("table"): + + # Add the title of the col + titles = soup.new_tag("tr") + for season in LAT_LON_TABLE_INFO: + title = soup.new_tag("th") + title.append(season) + titles.append(title) + t.append(titles) + + rows = soup.new_tag("tr") + for season in LAT_LON_TABLE_INFO: + row_url = "{}_metrics_table.html".format(season) + td = soup.new_tag("td") + a = soup.new_tag("a", href=row_url) + a.append(season) + td.append(a) + rows.append(td) + t.append(rows) + break # there should only be one table tag + + html = soup.prettify("utf-8") + index_page = os.path.join(table_dir, 'index.html') + with open(index_page, "wb") as f: + f.write(html) + +def _add_lat_lon_table_to_viewer(csv_path, root_dir): """Add a link to the lat-lon table to the viewer""" - # TODO: Implement this, but first talk to the others about how they want it to look - pass + + # Add this to index.html: + + # + # Latitiude-Longitude Table + # + # underneath this, which is already in index.html: + # + # Latitude-Longitude contour maps + # + + index_page = os.path.join(root_dir, 'index.html') + soup = BeautifulSoup(open(index_page), "lxml") + + table_index_path = 'table/index.html' + table_a = soup.new_tag("a", href=table_index_path, style="padding-left:1em") + table_a.append("Latitiude-Longitude Table") + + # append the new tag underneath the old one, so add it to the parent of the old one + for a in soup.find_all('a'): + if _better_page_name('lat_lon') in a: + parent = a.parent + parent.append(table_a) + + html = soup.prettify("utf-8") + with open(index_page, "wb") as f: + f.write(html) def generate_lat_lon_metrics_table(root_dir): """For each season in LAT_LON_TABLE_INFO, create a csv, convert it to an html and append that html to the viewer.""" + table_dir = os.path.join(root_dir, 'table') # output_dir/viewer/table + table_dir = os.path.abspath(table_dir) + if not os.path.exists(table_dir): + os.mkdir(table_dir) + + _create_lat_lon_table_index(table_dir) + for season in LAT_LON_TABLE_INFO: - csv_path = _create_csv_from_dict(root_dir, season) + csv_path = _create_csv_from_dict(table_dir, season) html_path = _cvs_to_html(csv_path, season) - _add_lat_lon_table_to_viewer(html_path) - # print('Path to lat-lon table: {}'.format(html_path)) + _add_lat_lon_table_to_viewer(html_path, root_dir) def create_viewer(root_dir, parameters, ext): """Based of the parameters, find the files with From bb26ad3e988ef50ed4aa0e0844cf412ef4f33ca0 Mon Sep 17 00:00:00 2001 From: zshaheen Date: Mon, 20 Nov 2017 09:58:57 -0800 Subject: [PATCH 09/11] Table works with one season. 
--- acme_diags/acme_viewer.py | 163 ++++++++++++++++++++++++++------------ 1 file changed, 114 insertions(+), 49 deletions(-) diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index b2727a265..a500bf845 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -4,6 +4,7 @@ import datetime import shutil import collections +import copy import csv import json from bs4 import BeautifulSoup @@ -131,7 +132,10 @@ def _extras(root_dir, parameters): parameters[0].test_name, dt, path) h1_to_h3(f) + _edit_table_html(root_dir) + _add_lat_lon_table_to_viewer_index(root_dir) + def _add_pages_and_top_row(viewer, parameters): """Add the page and columns of the page""" set_to_seasons = collections.OrderedDict() # dict of {set: [seasons]} @@ -187,10 +191,8 @@ def _add_to_lat_lon_metrics_table(metrics_path, season, row_name): def _create_csv_from_dict(output_dir, season): """Create a csv for a season in LAT_LON_TABLE_INFO in output_dir and return the path to it""" - - # output_dir = os.path.join(parameter.results_dir, '{}'.format(set_num)) - - table_path = os.path.abspath(os.path.join(output_dir, season + '_metrics_table.csv')) + #table_path = os.path.abspath(os.path.join(output_dir, season + '_metrics_table.csv')) + table_path = os.path.join(output_dir, season + '_metrics_table.csv') col_names = ['Variables', 'Unit', 'Model mean', 'Obs mean', 'Mean Bias', 'RMSE', 'correlation'] @@ -236,51 +238,113 @@ def _cvs_to_html(csv_path, season): return html_path -def _create_lat_lon_table_index(table_dir): - """Create an index.html that links the individual htmls for the lat-lon table.""" - html = ''' - - - Latitude Longitude Table - - - - - -
- - + +def _add_html_to_col(season, season_path, html_path): + """Since the output viewer doesn't support html images, do this hack. + For the col in the html at html_path, insert the link to col_path.""" + # Change: + # + # + # + # + # + # + # + # to: + # + # + # + # {season} + # + # + # + + soup = BeautifulSoup(open(html_path), "lxml") + + for tr in soup.find_all("tr", {"class": "output-row"}): + index = ['All variables', 'ANN', 'DJF', 'MAM', 'JJA', 'SON'].index(season) + cols = tr.find_all("td") # the cols are ['All variables', 'ANN', 'DJF', 'MAM', 'JJA', 'SON'] + td = cols[index] # get the HTML element related to the season + + url = os.path.join('..', '..', '..', season_path) + a = soup.new_tag("a", href=url) + a.append(season) + + td.string = '' + td.append(a) + + html = soup.prettify("utf-8") + with open(html_path, "wb") as f: + f.write(html) + +def _edit_table_html(root_dir): + """After the viewer is created, edit the table html to insert the custom htmls""" + for s in ['ANN', 'DJF', 'MAM', 'JJA', 'SON']: + if s in LAT_LON_TABLE_INFO: + _add_html_to_col(s, LAT_LON_TABLE_INFO[s]['html_path'], os.path.join(root_dir, 'table', 'index.html')) + +def _create_lat_lon_table_index(viewer, root_dir): + """Create an index in the viewer that links the individual htmls for the lat-lon table.""" + seasons = ['ANN', 'DJF', 'MAM', 'JJA', 'SON'] + viewer.add_page('Table', seasons) + viewer.add_group('Something Table') + viewer.add_row('All variables') + + for s in seasons: + if s in LAT_LON_TABLE_INFO: + viewer.add_col(LAT_LON_TABLE_INFO[s]['html_path'], is_file=False, title=s) + else: + viewer.add_col('-----', is_file=True, title='-----') + +def _add_lat_lon_table_to_viewer_index(root_dir): + """Move the link to Table next to the link to Latitude-Longitude contour maps""" + + index_page = os.path.join(root_dir, 'index.html') + soup = BeautifulSoup(open(index_page), "lxml") + + # append the new tag underneath the old one, so add it to the parent of the old one + + td_to_move = None + for tr in soup.find_all("tr"): + for td in tr.find_all("td"): + for a in td.find_all("a"): + if 'table' in a['href']: + td_to_move = copy.deepcopy(td) + tr.decompose() + + if td_to_move: + for tr in soup.find_all("tr"): + for td in tr.find_all("td"): + for a in td.find_all("a"): + print 'a' + print a + print "_better_page_name('lat_lon')" + print _better_page_name('lat_lon') + if _better_page_name('lat_lon') in a.string: + print 'moving shit!' + print 'moving shit!' + print 'moving shit!' 
+ td.append(td_to_move) + + html = soup.prettify("utf-8") + with open(index_page, "wb") as f: + f.write(html) + ''' - soup = BeautifulSoup(html, "lxml") - for t in soup.find_all("table"): - - # Add the title of the col - titles = soup.new_tag("tr") - for season in LAT_LON_TABLE_INFO: - title = soup.new_tag("th") - title.append(season) - titles.append(title) - t.append(titles) - - rows = soup.new_tag("tr") - for season in LAT_LON_TABLE_INFO: - row_url = "{}_metrics_table.html".format(season) - td = soup.new_tag("td") - a = soup.new_tag("a", href=row_url) - a.append(season) - td.append(a) - rows.append(td) - t.append(rows) - break # there should only be one table tag + for a in soup.find_all('a'): + if _better_page_name('lat_lon') in a: + parent = a.parent + parent.append(table_a) + # Remove the tr with the Table in it html = soup.prettify("utf-8") - index_page = os.path.join(table_dir, 'index.html') with open(index_page, "wb") as f: f.write(html) + ''' -def _add_lat_lon_table_to_viewer(csv_path, root_dir): - """Add a link to the lat-lon table to the viewer""" + + ''' # Add this to index.html: # @@ -307,20 +371,21 @@ def _add_lat_lon_table_to_viewer(csv_path, root_dir): html = soup.prettify("utf-8") with open(index_page, "wb") as f: f.write(html) + ''' -def generate_lat_lon_metrics_table(root_dir): +def generate_lat_lon_metrics_table(viewer, root_dir): """For each season in LAT_LON_TABLE_INFO, create a csv, convert it to an html and append that html to the viewer.""" - table_dir = os.path.join(root_dir, 'table') # output_dir/viewer/table - table_dir = os.path.abspath(table_dir) + table_dir = os.path.join(root_dir, 'table-data') # output_dir/viewer/table-data + if not os.path.exists(table_dir): os.mkdir(table_dir) - _create_lat_lon_table_index(table_dir) - for season in LAT_LON_TABLE_INFO: csv_path = _create_csv_from_dict(table_dir, season) html_path = _cvs_to_html(csv_path, season) - _add_lat_lon_table_to_viewer(html_path, root_dir) + LAT_LON_TABLE_INFO[season]['html_path'] = html_path + + _create_lat_lon_table_index(viewer, root_dir) def create_viewer(root_dir, parameters, ext): """Based of the parameters, find the files with @@ -410,6 +475,6 @@ def create_viewer(root_dir, parameters, ext): viewer.add_col(fnm + '.' + ext, is_file=True, title=col_season, other_files=formatted_files) + generate_lat_lon_metrics_table(viewer, root_dir) viewer.generate_viewer(prompt_user=False) - generate_lat_lon_metrics_table(root_dir) _extras(root_dir, parameters) From 196a8f1646da615cae04d6ba2eb9aea7a14d9f3f Mon Sep 17 00:00:00 2001 From: zshaheen Date: Tue, 21 Nov 2017 11:57:44 -0800 Subject: [PATCH 10/11] Check if json actually exists before making the table. 
--- acme_diags/acme_viewer.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index b2727a265..9daee44d7 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -360,8 +360,11 @@ def create_viewer(root_dir, parameters, ext): if set_num in ['lat_lon', '5']: metrics_path = os.path.join(parameter.results_dir, '{}'.format(set_num), parameter.case_id, fnm) - _add_to_lat_lon_metrics_table(metrics_path, season, row_name) - + if os.path.exists(metrics_path + '.json'): + _add_to_lat_lon_metrics_table(metrics_path, season, row_name) + else: + print('JSON does not exist: {}'.format(metrics_path + '.json')) + continue for row_name, fnm in row_name_and_fnm: if parameter.case_id not in ROW_INFO[set_num]: ROW_INFO[set_num][parameter.case_id] = collections.OrderedDict( From c214b9ba978494509f66c478742d1e57380f1a8b Mon Sep 17 00:00:00 2001 From: zshaheen Date: Tue, 5 Dec 2017 15:19:35 -0800 Subject: [PATCH 11/11] Cleanup. --- acme_diags/acme_viewer.py | 54 +-------------------------------------- 1 file changed, 1 insertion(+), 53 deletions(-) diff --git a/acme_diags/acme_viewer.py b/acme_diags/acme_viewer.py index 0cc4a0f24..d72801af8 100644 --- a/acme_diags/acme_viewer.py +++ b/acme_diags/acme_viewer.py @@ -287,7 +287,7 @@ def _create_lat_lon_table_index(viewer, root_dir): """Create an index in the viewer that links the individual htmls for the lat-lon table.""" seasons = ['ANN', 'DJF', 'MAM', 'JJA', 'SON'] viewer.add_page('Table', seasons) - viewer.add_group('Something Table') + viewer.add_group('Summary Table') viewer.add_row('All variables') for s in seasons: @@ -298,12 +298,10 @@ def _create_lat_lon_table_index(viewer, root_dir): def _add_lat_lon_table_to_viewer_index(root_dir): """Move the link to Table next to the link to Latitude-Longitude contour maps""" - index_page = os.path.join(root_dir, 'index.html') soup = BeautifulSoup(open(index_page), "lxml") # append the new tag underneath the old one, so add it to the parent of the old one - td_to_move = None for tr in soup.find_all("tr"): for td in tr.find_all("td"): @@ -316,63 +314,13 @@ def _add_lat_lon_table_to_viewer_index(root_dir): for tr in soup.find_all("tr"): for td in tr.find_all("td"): for a in td.find_all("a"): - print 'a' - print a - print "_better_page_name('lat_lon')" - print _better_page_name('lat_lon') if _better_page_name('lat_lon') in a.string: - print 'moving shit!' - print 'moving shit!' - print 'moving shit!' 
td.append(td_to_move) html = soup.prettify("utf-8") with open(index_page, "wb") as f: f.write(html) - ''' - for a in soup.find_all('a'): - if _better_page_name('lat_lon') in a: - parent = a.parent - parent.append(table_a) - - # Remove the tr with the Table in it - html = soup.prettify("utf-8") - with open(index_page, "wb") as f: - f.write(html) - ''' - - - - ''' - # Add this to index.html: - - # - # Latitiude-Longitude Table - # - # underneath this, which is already in index.html: - # - # Latitude-Longitude contour maps - # - - index_page = os.path.join(root_dir, 'index.html') - soup = BeautifulSoup(open(index_page), "lxml") - - table_index_path = 'table/index.html' - table_a = soup.new_tag("a", href=table_index_path, style="padding-left:1em") - table_a.append("Latitiude-Longitude Table") - - # append the new tag underneath the old one, so add it to the parent of the old one - for a in soup.find_all('a'): - if _better_page_name('lat_lon') in a: - parent = a.parent - parent.append(table_a) - - html = soup.prettify("utf-8") - with open(index_page, "wb") as f: - f.write(html) - ''' - def generate_lat_lon_metrics_table(viewer, root_dir): """For each season in LAT_LON_TABLE_INFO, create a csv, convert it to an html and append that html to the viewer.""" table_dir = os.path.join(root_dir, 'table-data') # output_dir/viewer/table-data
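
Note on the data flow these patches implement: lat_lon_driver.py dumps a per-variable/season/region metrics_dict to JSON, and the viewer later reads those JSON files back to build the seasonal CSV/HTML tables. The sketch below is a minimal, self-contained illustration of that round trip, not code from the patch series; the file name, row name, and numeric values are made up for the example, and only the dictionary keys and CSV columns come from the patches (see create_metrics and _create_csv_from_dict).

import csv
import json

# Shape of the per-plot metrics JSON written by lat_lon_driver.py
# (plain floats, so json.dump needs no custom encoder).
metrics_dict = {
    'test_regrid': {'min': 180.1, 'max': 302.8, 'mean': 287.9},
    'ref_regrid': {'min': 181.5, 'max': 303.2, 'mean': 288.4},
    'misc': {'rmse': 1.23, 'corr': 0.98},
    'unit': 'K',
}

fnm = 'ERA-Interim-T-850-ANN-global'  # illustrative output_file name
with open(fnm + '.json', 'w') as outfile:
    json.dump(metrics_dict, outfile)

# Viewer side: load the JSON back and emit one row of the seasonal
# metrics table, mirroring _create_csv_from_dict().
with open(fnm + '.json') as infile:
    m = json.load(infile)

row = [
    'T 850 mb global ERA-Interim',  # row name: '{var} {plev} mb {region} {ref_name}'
    m['unit'],
    round(m['test_regrid']['mean'], 3),                            # model mean
    round(m['ref_regrid']['mean'], 3),                             # obs mean
    round(m['test_regrid']['mean'] - m['ref_regrid']['mean'], 3),  # mean bias
    round(m['misc']['rmse'], 3),
    round(m['misc']['corr'], 3),
]

with open('ANN_metrics_table.csv', 'w') as table_csv:
    writer = csv.writer(table_csv, delimiter=',', lineterminator='\n', quoting=csv.QUOTE_NONE)
    writer.writerow(['Variables', 'Unit', 'Model mean', 'Obs mean', 'Mean Bias', 'RMSE', 'correlation'])
    writer.writerow(row)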