Adding initial simple logging #6 #29

Open · wants to merge 2 commits into main · changes from all commits
scripts/GEAR/convert_GEAR_beam.py · 13 changes: 11 additions & 2 deletions
@@ -7,6 +7,7 @@
 # and be adapted for your own datasets.
 
 import os
+import logging
 import sys
 import apache_beam as beam
 from pangeo_forge_recipes.patterns import ConcatDim, FilePattern
@@ -19,6 +20,7 @@
     T,
 )
 from pangeo_forge_recipes.types import Indexed
+logging.basicConfig(level=logging.INFO)
 
 from GEAR_config import load_yaml_config
 
@@ -29,6 +31,12 @@
 file_path = sys.argv[1]
 config = load_yaml_config(file_path)
 
+logging.info('Converting data in ' + config.input_dir + ' from ' + str(config.start_year) + ' to ' + str(config.end_year))
+logging.info('Outputting to ' + config.store_name + ' in ' + config.target_root)
+logging.info('Rechunking to ' + str(config.target_chunks) + ' using ' + str(config.num_workers) + ' process(es)')
+if config.prune > 0:
+    logging.info('Only using first ' + str(config.prune) + ' files')
+
 if not os.path.exists(config.target_root):
     os.makedirs(config.target_root)
 
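The messages above build their strings eagerly with + concatenation. For reference, logging also accepts %-style arguments that are only interpolated if the record is actually emitted; a sketch of the first message in that form, assuming the same config object loaded earlier in the script:

    # Lazy %-style alternative to the concatenated message; the string is
    # only formatted when the INFO level is enabled.
    logging.info('Converting data in %s from %s to %s',
                 config.input_dir, config.start_year, config.end_year)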
@@ -70,9 +78,9 @@ def _datavar_to_coordvar(item: Indexed[T]) -> Indexed[T]:
         # Here we convert some of the variables in the file
         # to coordinate variables so that pangeo-forge-recipes
         # can process them
-        print(f'Preprocessing before {ds =}')
+        logging.info(f'Dataset chunk before preprocessing: {ds =}')
         ds = ds.set_coords(['x_bnds', 'y_bnds', 'time_bnds', 'crs'])
-        print(f'Preprocessing after {ds =}')
+        logging.info(f'Dataset chunk after preprocessing: {ds =}')
         return index, ds
 
     # this expand function is a necessary part of
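For context, the ds being logged here is an xarray Dataset, and set_coords promotes the named data variables to coordinate variables without altering their values. A toy illustration with hypothetical variables (not the GEAR data):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset({'rainfall': ('x', np.arange(3.0)),
                     'x_bnds': ('x', np.arange(3.0))})
    ds = ds.set_coords(['x_bnds'])  # 'x_bnds' moves from data variables to coords
    print(ds.coords)                # now includes 'x_bnds'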
@@ -96,6 +104,7 @@ def expand(self, pcoll: beam.PCollection) -> beam.PCollection:
     | ConsolidateMetadata()
 )
 
+logging.info('Executing pipeline...')
 if config.num_workers > 1:
     beam_options = PipelineOptions(
         direct_num_workers=config.num_workers, direct_running_mode="multi_processing"
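direct_num_workers and direct_running_mode are DirectRunner options, so the multi_processing mode above fans the local run out across processes. A sketch of how such options plug into a pipeline run, with a stand-in transform since the rest of the recipe is elided here:

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    beam_options = PipelineOptions(
        direct_num_workers=4, direct_running_mode='multi_processing'
    )
    with beam.Pipeline(options=beam_options) as p:
        # stand-in transform; the real script would execute the recipe built above
        p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)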