Merge branch 'master' into runid-sort
dschwoerer authored Jan 9, 2023
2 parents e23d622 + afb5ba6 commit 5c82b92
Showing 12 changed files with 765 additions and 67 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/master.yml
@@ -20,7 +20,7 @@ jobs:
if: always()
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
python-version: [3.8, 3.9, '3.10']
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
fail-fast: false
@@ -33,7 +33,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
@@ -53,7 +53,7 @@ jobs:
if: always()
strategy:
matrix:
python-version: [3.7, 3.8]
python-version: [3.8]
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
fail-fast: false
@@ -66,7 +66,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
4 changes: 2 additions & 2 deletions .github/workflows/pythonpackage.yml
@@ -33,7 +33,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
@@ -66,7 +66,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
10 changes: 5 additions & 5 deletions .github/workflows/pythonpublish.yml
@@ -14,7 +14,7 @@ jobs:
if: always()
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
python-version: [3.8, 3.9, '3.10']
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
fail-fast: true
@@ -27,7 +27,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
@@ -47,7 +47,7 @@ jobs:
if: always()
strategy:
matrix:
python-version: [3.7, 3.8]
python-version: [3.8]
pip-packages:
- "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
fail-fast: true
@@ -60,7 +60,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade ${{ matrix.pip-packages }}
pip install -e .
@@ -123,7 +123,7 @@ jobs:
python-version: '3.x'
- name: Install dependencies
run: |
sudo apt-get install libhdf5-dev libnetcdf-dev
sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
python -m pip install --upgrade pip
pip install --upgrade setuptools wheel twine
pip install -e .
6 changes: 5 additions & 1 deletion setup.cfg
@@ -28,7 +28,7 @@ setup_requires =
setuptools_scm[toml]>=3.4
setuptools_scm_git_archive
install_requires =
xarray>=0.18.0,!=2022.9.0,!=2022.10.0
xarray>=0.18.0,!=2022.9.0,!=2022.10.0,!=2022.11.0,!=2022.12.0
boutdata>=0.1.4
dask[array]>=2.10.0
gelidum>=0.5.3
@@ -50,6 +50,10 @@ calc =
xrft
xhistogram
docs = sphinx >= 1.4
3d_plot =
k3d >= 2.8.0
mayavi >= 4.7.2
wand

[build_sphinx]
project = $metadata.name
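The tightened xarray requirement above additionally excludes the broken 2022.11.0 and 2022.12.0 releases, and the new 3d_plot extra groups the optional 3d-plotting dependencies (so something like pip install "xbout[3d_plot]" should pull in k3d, mayavi and wand). As a quick, illustrative check of the new version specifier (not part of the commit), using the packaging library:

# Illustrative only: evaluate the new install_requires specifier for xarray
# with the packaging library (assumed to be installed alongside setuptools).
from packaging.specifiers import SpecifierSet
from packaging.version import Version

xarray_spec = SpecifierSet(">=0.18.0,!=2022.9.0,!=2022.10.0,!=2022.11.0,!=2022.12.0")

print(Version("2022.12.0") in xarray_spec)  # False - newly excluded release
print(Version("2023.1.0") in xarray_spec)   # True  - not excluded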
20 changes: 20 additions & 0 deletions xbout/boutdataarray.py
@@ -385,6 +385,9 @@ def interpolate_parallel(

return da

def add_cartesian_coordinates(self):
return _add_cartesian_coordinates(self.data)

def add_cartesian_coordinates(self):
"""
Add Cartesian (X,Y,Z) coordinates.
@@ -1077,3 +1080,20 @@ def plot_regions(self, ax=None, **kwargs):
tokamak topology.
"""
return plotfuncs.plot_regions(self.data, ax=ax, **kwargs)

def plot3d(self, ax=None, **kwargs):
"""
Make a 3d plot
Warnings
--------
3d plotting functionality is still a bit of a work in progress. Bugs are likely, and
help developing is welcome!
Parameters
----------
See plotfuncs.plot3d()
"""
return plotfuncs.plot3d(self.data, **kwargs)
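
A minimal usage sketch of the accessor methods added in this file (and the matching Dataset method below); the file paths and the variable name "n" are hypothetical, and plot3d needs the optional 3d_plot dependencies added in setup.cfg:

# Hedged usage sketch - paths and the variable name "n" are placeholders.
from xbout import open_boutdataset

ds = open_boutdataset("data/BOUT.dmp.*.nc", geometry="toroidal",
                      gridfilepath="data/grid.nc")

# New accessor method: attach Cartesian X, Y, Z coordinates to the Dataset
ds = ds.bout.add_cartesian_coordinates()

# New DataArray accessor method: 3d plot of a single variable
ds["n"].bout.plot3d()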
3 changes: 3 additions & 0 deletions xbout/boutdataset.py
@@ -266,6 +266,9 @@ def find_with_dims(first_var, dims):

return ds

def add_cartesian_coordinates(self):
return _add_cartesian_coordinates(self.data)

def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False):
"""
Integrate using the midpoint rule for spatial dimensions, and trapezium rule for
121 changes: 106 additions & 15 deletions xbout/load.py
@@ -35,6 +35,17 @@
"MYPE",
]
_BOUT_TIME_DEPENDENT_META_VARS = ["iteration", "hist_hi", "tt"]
_BOUT_GEOMETRY_VARS = [
"ixseps1",
"ixseps2",
"jyseps1_1",
"jyseps2_1",
"jyseps1_2",
"jyseps2_2",
"nx",
"ny",
"ny_inner",
]


# This code should run whenever any function from this module is imported
@@ -350,6 +361,10 @@ def attrs_remove_section(obj, section):
pass
else:
raise ValueError(msg)
for v in _BOUT_GEOMETRY_VARS:
if v not in ds.metadata and v in grid:
ds.metadata[v] = grid[v].values

# Update coordinates to match particular geometry of grid
ds = geometries.apply_geometry(ds, geometry, grid=grid)

Expand All @@ -365,6 +380,42 @@ def attrs_remove_section(obj, section):
# BOUT++
ds.bout.fine_interpolation_factor = 8

if ("dump" in input_type or "restart" in input_type) and ds.metadata[
"BOUT_VERSION"
] < 4.0:
# Add workarounds for missing information or different conventions in data saved
# by BOUT++ v3.x.
for v in ds:
if ds.metadata["bout_zdim"] in ds[v].dims:
# All fields saved on aligned grid for BOUT-3
ds[v].attrs["direction_y"] = "Aligned"

added_location = False
if any(
d in ds[v].dims
for d in (
ds.metadata["bout_xdim"],
ds.metadata["bout_ydim"],
ds.metadata["bout_zdim"],
)
):
# zShift, etc. did not support staggered grids in BOUT++ v3 anyway, so
# just treat all variables as if they were at CELL_CENTRE
ds[v].attrs["cell_location"] = "CELL_CENTRE"
added_location = True
if added_location:
warn(
"Detected data from BOUT++ v3.x. Treating all variables as being "
"at `CELL_CENTRE`. Should be similar to what BOUT++ v3.x did, but "
"if your code uses staggered grids, this may produce unexpected "
"effects in some places."
)

if "nz" not in ds.metadata:
# `nz` used to be stored as `MZ` and `MZ` used to include an extra buffer
# point that was not used for data.
ds.metadata["nz"] = ds.metadata["MZ"] - 1

if info == "terse":
print("Read in dataset from {}".format(str(Path(datapath))))
elif info:
@@ -600,17 +651,40 @@ def _auto_open_mfboutdataset(

paths_grid, concat_dims = _arrange_for_concatenation(filepaths, nxpe, nype)

ds = xr.open_mfdataset(
paths_grid,
concat_dim=concat_dims,
combine="nested",
data_vars=data_vars,
preprocess=_preprocess,
engine=filetype,
chunks=chunks,
join="exact",
**kwargs,
)
try:
ds = xr.open_mfdataset(
paths_grid,
concat_dim=concat_dims,
combine="nested",
data_vars=data_vars,
preprocess=_preprocess,
engine=filetype,
chunks=chunks,
join="exact",
**kwargs,
)
except ValueError as e:
message_to_catch = (
"some variables in data_vars are not data variables on the first "
"dataset:"
)
if str(e)[: len(message_to_catch)] == message_to_catch:
# Open concatenating any variables that are different in
# different files as a work around to support opening older
# data.
ds = xr.open_mfdataset(
paths_grid,
concat_dim=concat_dims,
combine="nested",
data_vars="different",
preprocess=_preprocess,
engine=filetype,
chunks=chunks,
join="exact",
**kwargs,
)
else:
raise
else:
# datapath was nested list of Datasets

@@ -744,8 +818,16 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):

# Check whether this is a single file squashed from the multiple output files of a
# parallel run (i.e. NXPE*NYPE > 1 even though there is only a single file to read).
nx = ds["nx"].values
ny = ds["ny"].values
if "nx" in ds:
nx = ds["nx"].values
else:
# Workaround for older data files
nx = ds["MXSUB"].values * ds["NXPE"].values + 2 * ds["MXG"].values
if "ny" in ds:
ny = ds["ny"].values
else:
# Workaround for older data files
ny = ds["MYSUB"].values * ds["NYPE"].values
nx_file = ds.dims["x"]
ny_file = ds.dims["y"]
is_squashed_doublenull = False
Expand All @@ -758,7 +840,10 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):
mxg = 0

# Check if there are two divertor targets present
if ds["jyseps1_2"] > ds["jyseps2_1"]:
# Note: if jyseps2_1 and jyseps1_2 are not in ds it probably
# indicates older data and likely the upper target boundary cells
# were not saved anyway, so continue as if they were not.
if "jyseps2_1" in ds and ds["jyseps1_2"] > ds["jyseps2_1"]:
upper_target_cells = myg
else:
upper_target_cells = 0
@@ -771,7 +856,13 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):

nxpe = 1
nype = 1
is_squashed_doublenull = (ds["jyseps2_1"] != ds["jyseps1_2"]).values
if "jyseps2_1" in ds:
is_squashed_doublenull = (ds["jyseps2_1"] != ds["jyseps1_2"]).values
else:
# For older data with no jyseps2_1 or jyseps1_2 in the
# dataset, probably do not need to handle double null data
# squashed with upper target points.
is_squashed_doublenull = False
elif ny_file == ny + 2 * myg:
# Older squashed file from double-null grid but containing only lower
# target boundary cells.
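For the older-file workaround above, nx and ny are reconstructed from the processor decomposition when they are not stored directly. A worked example of that arithmetic, with invented sizes (not taken from any real run):

# Worked example of the nx/ny fallback arithmetic, with made-up values.
MXSUB, NXPE, MXG = 16, 4, 2   # x points per processor, x processors, x guard cells
MYSUB, NYPE = 32, 2           # y points per processor, y processors

nx = MXSUB * NXPE + 2 * MXG   # 16*4 + 2*2 = 68 (x guard cells included)
ny = MYSUB * NYPE             # 32*2       = 64 (no y guard cells included)
print(nx, ny)                 # 68 64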
2 changes: 1 addition & 1 deletion xbout/plotting/animate.py
@@ -600,7 +600,7 @@ def animate_line(

# Check plot is the right orientation
t_read, x_read = data.dims
if t_read is animate_over:
if t_read == animate_over:
x = x_read
else:
data = data.transpose(animate_over, t_read, transpose_coords=True)
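The one-character fix above swaps an identity check (is) for an equality check (==). A short illustration of why that matters for strings; this snippet is not part of the commit:

# "is" compares object identity, "==" compares values; two equal strings are
# not guaranteed to be the same object, so the old identity check could fail.
animate_over = "time"
t_read = "".join(["ti", "me"])  # builds a new, non-interned "time" string

print(t_read == animate_over)   # True  - values are equal
print(t_read is animate_over)   # False - different objects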