From 1277532e861dd1c4b88cdc92ef0ee5536bf9ed25 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sun, 15 Mar 2020 22:12:52 +0000 Subject: [PATCH 01/94] Chunk grid file after removing upper boundary points Need to re-chunk grid after using xr.concat to re-join the grid Dataset with upper boundary points removed. Otherwise 'y' is re-chunked into at least two parts. --- xbout/load.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/xbout/load.py b/xbout/load.py index 2433accc..0a1eaf9d 100644 --- a/xbout/load.py +++ b/xbout/load.py @@ -583,9 +583,6 @@ def _open_grid(datapath, chunks, keep_xboundaries, keep_yboundaries, mxg=2): gridfilepath = Path(datapath) grid = xr.open_dataset(gridfilepath, engine=_check_filetype(gridfilepath)) - if 'z' in grid_chunks and 'z' not in grid.dims: - del grid_chunks['z'] - grid = grid.chunk(grid_chunks) # TODO find out what 'yup_xsplit' etc are in the doublenull storm file John gave me # For now drop any variables with extra dimensions @@ -622,4 +619,9 @@ def _open_grid(datapath, chunks, keep_xboundaries, keep_yboundaries, mxg=2): grid = xr.concat((grid_lower, grid_upper), dim='y', data_vars='minimal', compat='identical', join='exact') + + if 'z' in grid_chunks and 'z' not in grid.dims: + del grid_chunks['z'] + grid = grid.chunk(grid_chunks) + return grid From bd0e88d0c0803d7cf472de132ed7aab9428ba27c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sun, 15 Mar 2020 23:46:20 +0000 Subject: [PATCH 02/94] Methods for parallel interpolation of variables --- xbout/boutdataset.py | 117 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 44ea795b..f1089a4d 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -78,6 +78,123 @@ def getFieldAligned(self, name, caching=True): self.data[aligned_name] = self.data[name].bout.toFieldAligned() return self.data[aligned_name] + def setupParallelInterp(self, n=8): + """ + Set 
parameters and do some initialisation for parallel interpolation. + + At present this only supports output on a regular grid (constant spacing in y). + This is what Hypnotoad grids provide, but is not a requirement of BOUT++. + + Parameters + ----------- + n : int, optional + Factor to increase parallel resolution by + """ + + ds = self.data + ds.metadata['fine_interpolation_factor'] = n + for da in ds.values(): + da.metadata['fine_interpolation_factor'] = n + + return ds + + def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, + method='cubic'): + """ + Interpolate in the parallel direction to get a higher resolution version of the + variable in a certain region + + Parameters + ---------- + var : str + Name of the variable to interpolate + region : str + The region to calculate the output in + n : int, optional + The factor to increase the resolution by. Defaults to the value set by + BoutDataset.setupParallelInterp(), or 10 if that has not been called. + toroidal_points : int or sequence of int, optional + If int, number of toroidal points to output, applies a stride to toroidal + direction to save memory usage. If sequence of int, the indexes of toroidal + points for the output. + method : str, optional + The interpolation method to use. Options from xarray.DataArray.interp(), + currently: linear, nearest, zero, slinear, quadratic, cubic. Default is + 'cubic'. 
+ """ + + ds = self.data + xcoord = ds.metadata['bout_xdim'] + ycoord = ds.metadata['bout_ydim'] + zcoord = ds.metadata['bout_zdim'] + + try: + da = ds[var + '_' + region + '_fine'] + if isinstance(toroidal_points, int): + if len(da[zcoord]) != toroidal_points: + raise KeyError + else: + if ds[zcoord][toroidal_points] != da[zcoord]: + raise KeyError + return da + except KeyError: + pass + + da = ds[var] + if zcoord in da.dims and da.direction_y != 'Aligned': + aligned_input = False + da = ds.bout.getFieldAligned(var) + else: + aligned_input = True + + if n is None: + try: + n = self.data.metadata['fine_interpolation_factor'] + except KeyError: + n = 10 + + da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) + da = da.chunk({ycoord: None}) + dy = ds['dy'].bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) + + ny_local = len(da[ycoord]) + if region.connection_lower is None: + ystart = da[ycoord][0] - dy.isel(**{xcoord: 0, ycoord:0})/2. + else: + ystart = da[ycoord][2] - dy.isel(**{xcoord: 0, ycoord:2})/2. + ny_local -= 2 + if region.connection_upper is None: + yend = da[ycoord][-1] + dy.isel(**{xcoord: 0, ycoord:-1})/2. + else: + yend = da[ycoord][-3] + dy.isel(**{xcoord: 0, ycoord:-3})/2. 
+ ny_local -= 2 + + ny_fine = n*ny_local + 1 + y_fine = np.linspace(ystart, yend, ny_fine) + + da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method) + + if not aligned_input: + # Want output in non-aligned coordinates + da = da.bout.fromFieldAligned( + ds.bout.getHighParallelResRegion(var='zShift', region=region.name, + n=n, method=method) + ) + + if toroidal_points is not None: + if isinstance(toroidal_points, int): + nz = len(da[zcoord]) + zstride = nz//toroidal_points + da = da.isel(**{zcoord: slice(None, None, zstride)}) + else: + da = da.isel(**{zcoord: toroidal_points}) + + da.name = var + '_' + region.name + '_fine' + + ds[var + '_' + region.name + '_fine'] = da + + return da + def save(self, savepath='./boutdata.nc', filetype='NETCDF4', variables=None, save_dtype=None, separate_vars=False, pre_load=False): """ From f21d3d6ec5c20bcfcdd95064046a9fea69f1b3f8 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 08:36:07 +0000 Subject: [PATCH 03/94] Caching argument for getHighParallelResRegion If set to false, does not store the results in the Dataset, in order to save memory. --- xbout/boutdataset.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index f1089a4d..bf1722f5 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -99,7 +99,7 @@ def setupParallelInterp(self, n=8): return ds def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, - method='cubic'): + method='cubic', caching=True): """ Interpolate in the parallel direction to get a higher resolution version of the variable in a certain region @@ -121,6 +121,9 @@ def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is 'cubic'. 
+ caching : bool, optional + Save the interpolated results in the Dataset (the default). Can be set to + False to save memory. """ ds = self.data @@ -176,6 +179,7 @@ def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, if not aligned_input: # Want output in non-aligned coordinates + # Note: always caching zShift as storing a single Field2D is not expensive da = da.bout.fromFieldAligned( ds.bout.getHighParallelResRegion(var='zShift', region=region.name, n=n, method=method) @@ -191,7 +195,8 @@ def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, da.name = var + '_' + region.name + '_fine' - ds[var + '_' + region.name + '_fine'] = da + if caching: + ds[var + '_' + region.name + '_fine'] = da return da From f600aaaba274e7c2552980eab658487710db4bdf Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 10:27:21 +0000 Subject: [PATCH 04/94] Cache high-res variables in Region, not in Dataset --- xbout/boutdataset.py | 24 ++++++++++++------------ xbout/region.py | 1 + 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index bf1722f5..d0d41df0 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -127,21 +127,23 @@ def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, """ ds = self.data + region = ds.regions[region] xcoord = ds.metadata['bout_xdim'] ycoord = ds.metadata['bout_ydim'] zcoord = ds.metadata['bout_zdim'] - try: - da = ds[var + '_' + region + '_fine'] + if region.da_highres is not None: + result = region.da_highres + # as long as requested toroidal_points match the cached version, can return + # cached version if isinstance(toroidal_points, int): - if len(da[zcoord]) != toroidal_points: - raise KeyError + if len(result[zcoord]) == toroidal_points: + return result else: - if ds[zcoord][toroidal_points] != da[zcoord]: - raise KeyError - return da - except KeyError: - pass + if result[zcoord] == 
ds[zcoord][toroidal_points]: + return result + # toroidal_points did not match, so need to re-calculate + region.da_highres = None da = ds[var] if zcoord in da.dims and da.direction_y != 'Aligned': @@ -193,10 +195,8 @@ def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, else: da = da.isel(**{zcoord: toroidal_points}) - da.name = var + '_' + region.name + '_fine' - if caching: - ds[var + '_' + region.name + '_fine'] = da + region.da_highres = da return da diff --git a/xbout/region.py b/xbout/region.py index 873ee664..75e53c45 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -52,6 +52,7 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, self.connection_outer = connect_outer self.connection_lower = connect_lower self.connection_upper = connect_upper + self.da_highres = None if ds is not None: # calculate start and end coordinates From f84d91996609ea2d3d105bd7b0fd733095d572fa Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 10:40:18 +0000 Subject: [PATCH 05/94] Move highParallelResRegion to BoutDataArray from BoutDataset Making zShift a coordinate means that the Dataset is no longer required for the interpolation, so move the method to the BoutDataArray. 
--- xbout/boutdataarray.py | 97 +++++++++++++++++++++++++++++++++++++++ xbout/boutdataset.py | 102 ----------------------------------------- 2 files changed, 97 insertions(+), 102 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 3f879ac3..29902944 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -372,6 +372,103 @@ def fromRegion(self, region, with_guards=None): return da + def highParallelResRegion(self, region, n=None, toroidal_points=None, + method='cubic', caching=True): + """ + Interpolate in the parallel direction to get a higher resolution version of the + variable in a certain region + + Parameters + ---------- + region : str + The region to calculate the output in + n : int, optional + The factor to increase the resolution by. Defaults to the value set by + BoutDataset.setupParallelInterp(), or 10 if that has not been called. + toroidal_points : int or sequence of int, optional + If int, number of toroidal points to output, applies a stride to toroidal + direction to save memory usage. If sequence of int, the indexes of toroidal + points for the output. + method : str, optional + The interpolation method to use. Options from xarray.DataArray.interp(), + currently: linear, nearest, zero, slinear, quadratic, cubic. Default is + 'cubic'. + caching : bool, optional + Save the interpolated results in the Dataset (the default). Can be set to + False to save memory. 
+ """ + + da = self.data + region = da.regions[region] + xcoord = da.metadata['bout_xdim'] + ycoord = da.metadata['bout_ydim'] + zcoord = da.metadata['bout_zdim'] + + if region.da_highres is not None: + result = region.da_highres + # as long as requested toroidal_points match the cached version, can return + # cached version + if isinstance(toroidal_points, int): + if len(result[zcoord]) == toroidal_points: + return result + else: + if da[zcoord][toroidal_points] == result[zcoord]: + return result + # toroidal_points did not match, so need to re-calculate + region.da_highres = None + + if zcoord in da.dims and da.direction_y != 'Aligned': + aligned_input = False + da = da.bout.toFieldAligned() + else: + aligned_input = True + + if n is None: + try: + n = self.data.metadata['fine_interpolation_factor'] + except KeyError: + n = 10 + + da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) + da = da.chunk({ycoord: None}) + dy = da['dy'].bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) + + ny_local = len(da[ycoord]) + if region.connection_lower is None: + ystart = da[ycoord][0] - dy.isel(**{xcoord: 0, ycoord:0})/2. + else: + ystart = da[ycoord][2] - dy.isel(**{xcoord: 0, ycoord:2})/2. + ny_local -= 2 + if region.connection_upper is None: + yend = da[ycoord][-1] + dy.isel(**{xcoord: 0, ycoord:-1})/2. + else: + yend = da[ycoord][-3] + dy.isel(**{xcoord: 0, ycoord:-3})/2. 
+ ny_local -= 2 + + ny_fine = n*ny_local + 1 + y_fine = np.linspace(ystart, yend, ny_fine) + + da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method) + + if not aligned_input: + # Want output in non-aligned coordinates + # Note: always caching zShift as storing a single Field2D is not expensive + da = da.bout.fromFieldAligned() + + if toroidal_points is not None: + if isinstance(toroidal_points, int): + nz = len(da[zcoord]) + zstride = nz//toroidal_points + da = da.isel(**{zcoord: slice(None, None, zstride)}) + else: + da = da.isel(**{zcoord: toroidal_points}) + + if caching: + region.da_highres = da + + return da + + def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, save_as=None, ax=None, poloidal_plot=False, logscale=None, **kwargs): """ diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index d0d41df0..37ca385b 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -98,108 +98,6 @@ def setupParallelInterp(self, n=8): return ds - def getHighParallelResRegion(self, var, region, n=None, toroidal_points=None, - method='cubic', caching=True): - """ - Interpolate in the parallel direction to get a higher resolution version of the - variable in a certain region - - Parameters - ---------- - var : str - Name of the variable to interpolate - region : str - The region to calculate the output in - n : int, optional - The factor to increase the resolution by. Defaults to the value set by - BoutDataset.setupParallelInterp(), or 10 if that has not been called. - toroidal_points : int or sequence of int, optional - If int, number of toroidal points to output, applies a stride to toroidal - direction to save memory usage. If sequence of int, the indexes of toroidal - points for the output. - method : str, optional - The interpolation method to use. Options from xarray.DataArray.interp(), - currently: linear, nearest, zero, slinear, quadratic, cubic. Default is - 'cubic'. 
- caching : bool, optional - Save the interpolated results in the Dataset (the default). Can be set to - False to save memory. - """ - - ds = self.data - region = ds.regions[region] - xcoord = ds.metadata['bout_xdim'] - ycoord = ds.metadata['bout_ydim'] - zcoord = ds.metadata['bout_zdim'] - - if region.da_highres is not None: - result = region.da_highres - # as long as requested toroidal_points match the cached version, can return - # cached version - if isinstance(toroidal_points, int): - if len(result[zcoord]) == toroidal_points: - return result - else: - if result[zcoord] == ds[zcoord][toroidal_points]: - return result - # toroidal_points did not match, so need to re-calculate - region.da_highres = None - - da = ds[var] - if zcoord in da.dims and da.direction_y != 'Aligned': - aligned_input = False - da = ds.bout.getFieldAligned(var) - else: - aligned_input = True - - if n is None: - try: - n = self.data.metadata['fine_interpolation_factor'] - except KeyError: - n = 10 - - da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) - da = da.chunk({ycoord: None}) - dy = ds['dy'].bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) - - ny_local = len(da[ycoord]) - if region.connection_lower is None: - ystart = da[ycoord][0] - dy.isel(**{xcoord: 0, ycoord:0})/2. - else: - ystart = da[ycoord][2] - dy.isel(**{xcoord: 0, ycoord:2})/2. - ny_local -= 2 - if region.connection_upper is None: - yend = da[ycoord][-1] + dy.isel(**{xcoord: 0, ycoord:-1})/2. - else: - yend = da[ycoord][-3] + dy.isel(**{xcoord: 0, ycoord:-3})/2. 
- ny_local -= 2 - - ny_fine = n*ny_local + 1 - y_fine = np.linspace(ystart, yend, ny_fine) - - da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method) - - if not aligned_input: - # Want output in non-aligned coordinates - # Note: always caching zShift as storing a single Field2D is not expensive - da = da.bout.fromFieldAligned( - ds.bout.getHighParallelResRegion(var='zShift', region=region.name, - n=n, method=method) - ) - - if toroidal_points is not None: - if isinstance(toroidal_points, int): - nz = len(da[zcoord]) - zstride = nz//toroidal_points - da = da.isel(**{zcoord: slice(None, None, zstride)}) - else: - da = da.isel(**{zcoord: toroidal_points}) - - if caching: - region.da_highres = da - - return da - def save(self, savepath='./boutdata.nc', filetype='NETCDF4', variables=None, save_dtype=None, separate_vars=False, pre_load=False): """ From ae5009410d698c7321495f49ef5b017ccdec2891 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 10:42:27 +0000 Subject: [PATCH 06/94] When interpolating, extrapolate at the boundaries if necessary Provides a workaround for when the boundary cells were not saved in the data file. 
--- xbout/boutdataarray.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 29902944..d1c83442 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -448,7 +448,8 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, ny_fine = n*ny_local + 1 y_fine = np.linspace(ystart, yend, ny_fine) - da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method) + da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, + kwargs={'fill_value': 'extrapolate'}) if not aligned_input: # Want output in non-aligned coordinates From 51e04d2049d5983fd40bf9f95fffb0bcb1eb066d Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 13:10:35 +0000 Subject: [PATCH 07/94] Remove uses of dx and dy in highParallelResRegion Use coordinate ranges stored in Region instead. --- xbout/boutdataarray.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index d1c83442..b3b686e7 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -431,22 +431,9 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) da = da.chunk({ycoord: None}) - dy = da['dy'].bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) - ny_local = len(da[ycoord]) - if region.connection_lower is None: - ystart = da[ycoord][0] - dy.isel(**{xcoord: 0, ycoord:0})/2. - else: - ystart = da[ycoord][2] - dy.isel(**{xcoord: 0, ycoord:2})/2. - ny_local -= 2 - if region.connection_upper is None: - yend = da[ycoord][-1] + dy.isel(**{xcoord: 0, ycoord:-1})/2. - else: - yend = da[ycoord][-3] + dy.isel(**{xcoord: 0, ycoord:-3})/2. 
- ny_local -= 2 - - ny_fine = n*ny_local + 1 - y_fine = np.linspace(ystart, yend, ny_fine) + ny_fine = n*region.ny + 1 + y_fine = np.linspace(region.ylower, region.yupper, ny_fine) da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, kwargs={'fill_value': 'extrapolate'}) From 2e63880d39100c3015158512f8cee62604f66f63 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 18:13:41 +0000 Subject: [PATCH 08/94] Method to create high resolutions of a variable for all regions Returns a dict. --- xbout/boutdataarray.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index b3b686e7..f6f8ea0e 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -457,6 +457,38 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, return da + def highParallelRes(self, **kwargs): + """ + Interpolate in the parallel direction to get a higher resolution version of the + variable. + + Parameters + ---------- + n : int, optional + The factor to increase the resolution by. Defaults to the value set by + BoutDataset.setupParallelInterp(), or 10 if that has not been called. + toroidal_points : int or sequence of int, optional + If int, number of toroidal points to output, applies a stride to toroidal + direction to save memory usage. If sequence of int, the indexes of toroidal + points for the output. + method : str, optional + The interpolation method to use. Options from xarray.DataArray.interp(), + currently: linear, nearest, zero, slinear, quadratic, cubic. Default is + 'cubic'. + caching : bool, optional + Save the interpolated results in the Dataset (the default). Can be set to + False to save memory. + + Returns + ------- + A dict whose keys are the names of regions and whose values are the + high-resolution data of the variable in those regions. 
+ """ + + return {region: self.highParallelResRegion(region, **kwargs) + for region in self.data.regions} + + def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, save_as=None, ax=None, poloidal_plot=False, logscale=None, **kwargs): """ From 9213bff9d885daa6ce1aed2cd5ffd30b112e5cfe Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 22:13:53 +0000 Subject: [PATCH 09/94] Remove caching of high-resolution variables Instead, will provide functionality to save the high-resolution variables into a new Dataset. --- xbout/boutdataarray.py | 25 +------------------------ xbout/region.py | 1 - 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index f6f8ea0e..bff72001 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -373,7 +373,7 @@ def fromRegion(self, region, with_guards=None): return da def highParallelResRegion(self, region, n=None, toroidal_points=None, - method='cubic', caching=True): + method='cubic'): """ Interpolate in the parallel direction to get a higher resolution version of the variable in a certain region @@ -393,9 +393,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is 'cubic'. - caching : bool, optional - Save the interpolated results in the Dataset (the default). Can be set to - False to save memory. 
""" da = self.data @@ -404,19 +401,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, ycoord = da.metadata['bout_ydim'] zcoord = da.metadata['bout_zdim'] - if region.da_highres is not None: - result = region.da_highres - # as long as requested toroidal_points match the cached version, can return - # cached version - if isinstance(toroidal_points, int): - if len(result[zcoord]) == toroidal_points: - return result - else: - if da[zcoord][toroidal_points] == result[zcoord]: - return result - # toroidal_points did not match, so need to re-calculate - region.da_highres = None - if zcoord in da.dims and da.direction_y != 'Aligned': aligned_input = False da = da.bout.toFieldAligned() @@ -440,7 +424,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, if not aligned_input: # Want output in non-aligned coordinates - # Note: always caching zShift as storing a single Field2D is not expensive da = da.bout.fromFieldAligned() if toroidal_points is not None: @@ -451,9 +434,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, else: da = da.isel(**{zcoord: toroidal_points}) - if caching: - region.da_highres = da - return da @@ -475,9 +455,6 @@ def highParallelRes(self, **kwargs): The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is 'cubic'. - caching : bool, optional - Save the interpolated results in the Dataset (the default). Can be set to - False to save memory. 
Returns ------- diff --git a/xbout/region.py b/xbout/region.py index 75e53c45..873ee664 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -52,7 +52,6 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, self.connection_outer = connect_outer self.connection_lower = connect_lower self.connection_upper = connect_upper - self.da_highres = None if ds is not None: # calculate start and end coordinates From 845e2eb715a8678867390f30d011712f3a05a335 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 22:28:56 +0000 Subject: [PATCH 10/94] Update definition of y_fine used for parallel interpolation Distribute the output points of the high-resolution field like a standard BOUT++ cell-centred variable. --- xbout/boutdataarray.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index bff72001..33bb2369 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -416,8 +416,22 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) da = da.chunk({ycoord: None}) - ny_fine = n*region.ny + 1 - y_fine = np.linspace(region.ylower, region.yupper, ny_fine) + ny_fine = n*region.ny + dy = (region.yupper - region.ylower)/ny_fine + + myg = da.metadata['MYG'] + if da.metadata['keep_yboundaries'] and region.connection_lower is None: + ybndry_lower = myg + else: + ybndry_lower = 0 + if da.metadata['keep_yboundaries'] and region.connection_upper is None: + ybndry_upper = myg + else: + ybndry_upper = 0 + + y_fine = np.linspace(region.ylower - (ybndry_lower - 0.5)*dy, + region.yupper + (ybndry_upper - 0.5)*dy, + ny_fine + ybndry_lower + ybndry_upper) da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, kwargs={'fill_value': 'extrapolate'}) From 5ab0ecf550f454522830441522141914f79b40e3 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 16 Mar 2020 
22:30:12 +0000 Subject: [PATCH 11/94] Update metadata of high-resolution variable Increase jyseps*, ny, ny_inner, MYSUB to reflect the new resolution. --- xbout/boutdataarray.py | 3 +++ xbout/utils.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 33bb2369..c5f62e2e 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -12,6 +12,7 @@ from .plotting import plotfuncs from .plotting.utils import _create_norm from .region import Region +from .utils import _update_metadata_increased_resolution @register_dataarray_accessor('bout') @@ -436,6 +437,8 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, kwargs={'fill_value': 'extrapolate'}) + da = _update_metadata_increased_resolution(da, n) + if not aligned_input: # Want output in non-aligned coordinates da = da.bout.fromFieldAligned() diff --git a/xbout/utils.py b/xbout/utils.py index 5237a902..0530ce09 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -42,3 +42,34 @@ def _separate_metadata(ds): metadata = dict(zip(scalar_vars, metadata_vals)) return ds.drop(scalar_vars), metadata + +def _update_metadata_increased_resolution(da, n): + """ + Update the metadata variables to account for a y-direction resolution increased by a + factor n. 
+ + Parameters + ---------- + da : DataArray + The variable to update + n : int + The factor to increase the y-resolution by + """ + + # Take deepcopy to ensure we do not alter metadata of other variables + da.attrs['metadata'] = deepcopy(da.metadata) + + def update_jyseps(name): + da.metadata[name] = n*(da.metadata[name] + 1) - 1 + update_jyseps('jyseps1_1') + update_jyseps('jyseps2_1') + update_jyseps('jyseps1_2') + update_jyseps('jyseps2_2') + + def update_ny(name): + da.metadata[name] = n*da.metadata[name] + update_ny('ny') + update_ny('ny_inner') + update_ny('MYSUB') + + return da From a74f32561341acdabffca355f41b1e5b2575282b Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 11:25:38 +0000 Subject: [PATCH 12/94] Rename setupParallelInterp to resetParallelInterpFactor This method only actually does one thing, and is not required to be called before parallel interpolation, so rename to be clearer, and make 'n' argument non-optional. --- xbout/boutdataset.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 37ca385b..2a8d67f1 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -78,16 +78,13 @@ def getFieldAligned(self, name, caching=True): self.data[aligned_name] = self.data[name].bout.toFieldAligned() return self.data[aligned_name] - def setupParallelInterp(self, n=8): + def resetParallelInterpFactor(self, n): """ - Set parameters and do some initialisation for parallel interpolation. - - At present this only supports output on a regular grid (constant spacing in y). - This is what Hypnotoad grids provide, but is not a requirement of BOUT++. + Set the default factor to increase resolution when doing parallel interpolation. 
Parameters ----------- - n : int, optional + n : int Factor to increase parallel resolution by """ From 8ff50366061fa646207c8212f936952261a858c1 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 11:27:08 +0000 Subject: [PATCH 13/94] BoutDataArray.highParallelRes returns a Dataset By converting the DataArrays in each region into Datasets, can combine with xarray.combine_by_coords. Then is natural to return a Dataset, making it more straightforward to merge the results of calling this method for several variables into a single Dataset. --- xbout/boutdataarray.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index c5f62e2e..00ad96b5 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -475,12 +475,13 @@ def highParallelRes(self, **kwargs): Returns ------- - A dict whose keys are the names of regions and whose values are the - high-resolution data of the variable in those regions. + A new Dataset containing a high-resolution version of the variable. 
""" - return {region: self.highParallelResRegion(region, **kwargs) - for region in self.data.regions} + return xr.combine_by_coords( + [self.highParallelResRegion(region, **kwargs).to_dataset() + for region in self.data.regions] + ) def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, From 2bb80f35c59ca655e734c55b3c09df69684a36df Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 11:37:22 +0000 Subject: [PATCH 14/94] Update default to n=8 in highParallelResRegion --- xbout/boutdataarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 00ad96b5..7589dea6 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -412,7 +412,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, try: n = self.data.metadata['fine_interpolation_factor'] except KeyError: - n = 10 + n = 8 da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) da = da.chunk({ycoord: None}) From 0f1c64a7c4eab8caa89ae4c29223997aeeb7edf5 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 15:54:03 +0000 Subject: [PATCH 15/94] Remove regions attr when creating high-res interpolated variable Information in regions is not correct for the high-res variable, and needs to be recalculated later. --- xbout/boutdataarray.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 7589dea6..8536ae60 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -439,6 +439,11 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = _update_metadata_increased_resolution(da, n) + # Remove regions which have incorrect information for the high-resolution grid. 
+ # New regions will be generated when creating a new Dataset in + # BoutDataset.getHighParallelResVars + del da.attrs['regions'] + if not aligned_input: # Want output in non-aligned coordinates da = da.bout.fromFieldAligned() From 67a293926139fd47801fe302239eb1eec5e2c1e7 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 15:55:26 +0000 Subject: [PATCH 16/94] Workaround for combine_by_coords not keeping attrs In BoutDataArray.highParallelRes(), copy the attrs from the first part of the variable to the combined Dataset. --- xbout/boutdataarray.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 8536ae60..6984e780 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -483,10 +483,22 @@ def highParallelRes(self, **kwargs): A new Dataset containing a high-resolution version of the variable. """ - return xr.combine_by_coords( - [self.highParallelResRegion(region, **kwargs).to_dataset() + # xr.combine_by_coords does not keep attrs at the moment. See + # https://github.com/pydata/xarray/issues/3865 + # For now just copy the attrs from the first region. Can remove this workaround + # when the xarray issue is fixed. 
Should be able to use just: + #return xr.combine_by_coords( + # [self.highParallelResRegion(region, **kwargs).bout.to_dataset() + # for region in self.data.regions] + # ) + + parts = [self.highParallelResRegion(region, **kwargs).bout.to_dataset() for region in self.data.regions] - ) + + result = xr.combine_by_coords(parts) + result.attrs = parts[0].attrs + + return result def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, From 9f3a3d9b89ddcd6bd2823de4b1c42a43ad73db00 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 18:52:14 +0000 Subject: [PATCH 17/94] Allow geometries to be re-applied to a Dataset After interpolating to higher parallel resolution, a Dataset has the correct coordinates, but no regions. This commit makes add_toroidal_geometry_coords and add_s_alpha_geometry_coords skip adding coordinates if the coordinates are already present, so that the functions can be applied again to interpolated Datasets. At the moment, the only thing this does is to re-create the regions. --- xbout/geometries.py | 221 +++++++++++++++++++++++--------------------- 1 file changed, 117 insertions(+), 104 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index 302a3a58..d7b7f638 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -121,85 +121,95 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): coordinates = _set_default_toroidal_coordinates(coordinates) - # Check whether coordinates names conflict with variables in ds - bad_names = [name for name in coordinates.values() if name in ds] - if bad_names: - raise ValueError("Coordinate names {} clash with variables in the dataset. " - "Register a different geometry to provide alternative names. 
" - "It may be useful to use the 'coordinates' argument to " - "add_toroidal_geometry_coords() for this.".format(bad_names)) - - # Get extra geometry information from grid file if it's not in the dump files - needed_variables = ['psixy', 'Rxy', 'Zxy'] - for v in needed_variables: - if v not in ds: - if grid is None: - raise ValueError("Grid file is required to provide %s. Pass the grid " - "file name as the 'gridfilepath' argument to " - "open_boutdataset().") - ds[v] = grid[v] - - # Rename 't' if user requested it - ds = ds.rename(t=coordinates['t']) - - # Change names of dimensions to Orthogonal Toroidal ones - ds = ds.rename(y=coordinates['y']) - - # Add 1D Orthogonal Toroidal coordinates - # Make index 'x' a coordinate, useful for handling global indexing - nx = ds.dims['x'] - ds = ds.assign_coords(x=np.arange(nx)) - ny = ds.dims[coordinates['y']] - # dy should always be constant in x, so it is safe to slice to x=0. - # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid - # (similarly x-coordinate is 1d coordinate that labels y-z slices and z-coordinate is - # a 1d coordinate that labels x-y slices). A coordinate might have different values - # in disconnected regions, but there are no branch-cuts allowed in the x-direction in - # BOUT++ (at least for the momement), so the y-coordinate has to be 1d and - # single-valued. Therefore similarly dy has to be 1d and single-valued.] - # Need drop=True so that the result does not have an x-coordinate value which - # prevents it being added as a coordinate. - dy = ds['dy'].isel(x=0, drop=True) - - # calculate theta at the centre of each cell - theta = dy.cumsum(keep_attrs=True) - dy/2. - ds = ds.assign_coords(**{coordinates['y']: theta}) - - # TODO automatically make this coordinate 1D in simplified cases? - ds = ds.rename(psixy=coordinates['x']) - ds = ds.set_coords(coordinates['x']) - ds[coordinates['x']].attrs['units'] = 'Wb' - - # Record which dimensions 't', 'x', and 'y' were renamed to. 
- ds.metadata['bout_tdim'] = coordinates['t'] - # x dimension not renamed, so this is still 'x' - ds.metadata['bout_xdim'] = 'x' - ds.metadata['bout_ydim'] = coordinates['y'] - - # If full data (not just grid file) then toroidal dim will be present - if 'z' in ds.dims: - ds = ds.rename(z=coordinates['z']) - nz = ds.dims[coordinates['z']] - phi = xr.DataArray(np.linspace(start=ds.metadata['ZMIN'], - stop=2 * np.pi * ds.metadata['ZMAX'], num=nz), - dims=coordinates['z']) - ds = ds.assign_coords(**{coordinates['z']: phi}) - - # Record which dimension 'z' was renamed to. - ds.metadata['bout_zdim'] = coordinates['z'] - - # Add 2D Cylindrical coordinates - if ('R' not in ds) and ('Z' not in ds): - ds = ds.rename(Rxy='R', Zxy='Z') - ds = ds.set_coords(('R', 'Z')) - else: - ds = ds.set_coords(('Rxy', 'Zxy')) - - # Add zShift as a coordinate, so that it gets interpolated along with a variable - try: - ds = ds.set_coords('zShift') - except KeyError: - pass + # If the coordinates already exist, we are re-applying the geometry and do not need to + # add them again. + # Ignore coordinates['z'] because ds might be Field2D-type without a z-dimension, and + # if the other coordinates all match for a Field3D-type ds, we must actually be + # re-applying the geometry. + # Ignore coordinates['t'] because we do not rename 't' or make a t-coordinate + if not np.all([c in ds.coords or c == coordinates['z'] or c == coordinates['t'] + for c in coordinates.values()]): + + # Check whether coordinates names conflict with variables in ds + bad_names = [name for name in coordinates.values() if name in ds and name not in + ds.coords] + if bad_names: + raise ValueError("Coordinate names {} clash with variables in the dataset. " + "Register a different geometry to provide alternative names. 
" + "It may be useful to use the 'coordinates' argument to " + "add_toroidal_geometry_coords() for this.".format(bad_names)) + + # Get extra geometry information from grid file if it's not in the dump files + needed_variables = ['psixy', 'Rxy', 'Zxy'] + for v in needed_variables: + if v not in ds: + if grid is None: + raise ValueError("Grid file is required to provide %s. Pass the grid " + "file name as the 'gridfilepath' argument to " + "open_boutdataset().") + ds[v] = grid[v] + + # Rename 't' if user requested it + ds = ds.rename(t=coordinates['t']) + + # Change names of dimensions to Orthogonal Toroidal ones + ds = ds.rename(y=coordinates['y']) + + # Add 1D Orthogonal Toroidal coordinates + # Make index 'x' a coordinate, useful for handling global indexing + nx = ds.dims['x'] + ds = ds.assign_coords(x=np.arange(nx)) + ny = ds.dims[coordinates['y']] + # dy should always be constant in x, so it is safe to slice to x=0. + # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid + # (similarly x-coordinate is 1d coordinate that labels y-z slices and + # z-coordinate is a 1d coordinate that labels x-y slices). A coordinate might + # have different values in disconnected regions, but there are no branch-cuts + # allowed in the x-direction in BOUT++ (at least for the momement), so the + # y-coordinate has to be 1d and single-valued. Therefore similarly dy has to be + # 1d and single-valued.] Need drop=True so that the result does not have an + # x-coordinate value which prevents it being added as a coordinate. + dy = ds['dy'].isel(x=0, drop=True) + + # calculate theta at the centre of each cell + theta = dy.cumsum(keep_attrs=True) - dy/2. + ds = ds.assign_coords(**{coordinates['y']: theta}) + + # TODO automatically make this coordinate 1D in simplified cases? + ds = ds.rename(psixy=coordinates['x']) + ds = ds.set_coords(coordinates['x']) + ds[coordinates['x']].attrs['units'] = 'Wb' + + # Record which dimensions 't', 'x', and 'y' were renamed to. 
+ ds.metadata['bout_tdim'] = coordinates['t'] + # x dimension not renamed, so this is still 'x' + ds.metadata['bout_xdim'] = 'x' + ds.metadata['bout_ydim'] = coordinates['y'] + + # If full data (not just grid file) then toroidal dim will be present + if 'z' in ds.dims: + ds = ds.rename(z=coordinates['z']) + nz = ds.dims[coordinates['z']] + phi = xr.DataArray(np.linspace(start=ds.metadata['ZMIN'], + stop=2 * np.pi * ds.metadata['ZMAX'], num=nz), + dims=coordinates['z']) + ds = ds.assign_coords(**{coordinates['z']: phi}) + + # Record which dimension 'z' was renamed to. + ds.metadata['bout_zdim'] = coordinates['z'] + + # Add 2D Cylindrical coordinates + if ('R' not in ds) and ('Z' not in ds): + ds = ds.rename(Rxy='R', Zxy='Z') + ds = ds.set_coords(('R', 'Z')) + else: + ds = ds.set_coords(('Rxy', 'Zxy')) + + # Add zShift as a coordinate, so that it gets interpolated along with a variable + try: + ds = ds.set_coords('zShift') + except KeyError: + pass ds = _create_regions_toroidal(ds) @@ -211,32 +221,35 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): coordinates = _set_default_toroidal_coordinates(coordinates) - # Add 'hthe' from grid file, needed below for radial coordinate - if 'hthe' not in ds: - hthe_from_grid = True - if grid is None: - raise ValueError("Grid file is required to provide %s. 
Pass the grid " - "file name as the 'gridfilepath' argument to " - "open_boutdataset().") - ds['hthe'] = grid['hthe'] - else: - hthe_from_grid = False - ds = add_toroidal_geometry_coords(ds, coordinates=coordinates, grid=grid) - # Add 1D radial coordinate - if 'r' in ds: - raise ValueError("Cannot have variable 'r' in dataset when using " - "geometry='s-alpha'") - ds['r'] = ds['hthe'].isel({coordinates['y']: 0}).squeeze(drop=True) - ds['r'].attrs['units'] = 'm' - # remove x-index coordinate, don't need when we have 'r' as a radial coordinate - ds = ds.drop('x') - ds = ds.set_coords('r') - ds = ds.rename(x='r') - - if hthe_from_grid: - # remove hthe because it does not have correct metadata - del ds['hthe'] + # If 'r' already in ds.coords, then we are re-applying this geometry so can skip this + # part + if 'r' not in ds.coords: + # Add 'hthe' from grid file, needed below for radial coordinate + if 'hthe' not in ds: + hthe_from_grid = True + if grid is None: + raise ValueError("Grid file is required to provide %s. Pass the grid " + "file name as the 'gridfilepath' argument to " + "open_boutdataset().") + ds['hthe'] = grid['hthe'] + else: + hthe_from_grid = False + + # Add 1D radial coordinate + if 'r' in ds: + raise ValueError("Cannot have variable 'r' in dataset when using " + "geometry='s-alpha'") + ds['r'] = ds['hthe'].isel({coordinates['y']: 0}).squeeze(drop=True) + ds['r'].attrs['units'] = 'm' + # remove x-index coordinate, don't need when we have 'r' as a radial coordinate + ds = ds.drop('x') + ds = ds.set_coords('r') + ds = ds.rename(x='r') + + if hthe_from_grid: + # remove hthe because it does not have correct metadata + del ds['hthe'] return ds From 15dfe815be5fbdf2e0f08f75b995ef958006086e Mon Sep 17 00:00:00 2001 From: John Omotani Date: Wed, 18 Mar 2020 11:56:00 +0000 Subject: [PATCH 18/94] Fix applying s-alpha geometry Need to slice hthe with 'y' instead of 'theta' if it was read from grid file. 
--- xbout/geometries.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index d7b7f638..fb54b0f2 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -229,6 +229,7 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): # Add 'hthe' from grid file, needed below for radial coordinate if 'hthe' not in ds: hthe_from_grid = True + ycoord = 'y' if grid is None: raise ValueError("Grid file is required to provide %s. Pass the grid " "file name as the 'gridfilepath' argument to " @@ -236,12 +237,13 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): ds['hthe'] = grid['hthe'] else: hthe_from_grid = False + ycoord = coordinates['y'] # Add 1D radial coordinate if 'r' in ds: raise ValueError("Cannot have variable 'r' in dataset when using " "geometry='s-alpha'") - ds['r'] = ds['hthe'].isel({coordinates['y']: 0}).squeeze(drop=True) + ds['r'] = ds['hthe'].isel({ycoord: 0}).squeeze(drop=True) ds['r'].attrs['units'] = 'm' # remove x-index coordinate, don't need when we have 'r' as a radial coordinate ds = ds.drop('x') From 4b4999a6dd12074f4e37d571ccd4e7eea5542c6f Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 18:55:59 +0000 Subject: [PATCH 19/94] Add dy to interpolated DataArray as a coordinate Adding 'dy' as a coordinate allows it to be assembled correctly when DataArrays are combined with combine_by_coords, which is much more straightforward than recalculating it from the y-coordinate. When initialising a Dataset from the interpolated variables, will demote 'dy' to a variable again. 
--- xbout/boutdataarray.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 6984e780..759e1314 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -398,6 +398,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = self.data region = da.regions[region] + tcoord = da.metadata['bout_tdim'] xcoord = da.metadata['bout_xdim'] ycoord = da.metadata['bout_ydim'] zcoord = da.metadata['bout_zdim'] @@ -439,6 +440,20 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = _update_metadata_increased_resolution(da, n) + # Add dy to da as a coordinate. This will only be temporary, once we have combined + # the regions together, we will demote dy to a regular variable + dy_array = xr.DataArray(np.full([da.sizes[xcoord], da.sizes[ycoord]], dy), + dims=[xcoord, ycoord]) + # need a view of da with only x- and y-dimensions, unfortunately no neat way to do + # this with isel + da_2d = da + if tcoord in da.sizes: + da_2d = da_2d.isel(**{tcoord: 0}, drop=True) + if zcoord in da.sizes: + da_2d = da_2d.isel(**{zcoord: 0}, drop=True) + dy_array = da_2d.copy(data=dy_array) + da = da.assign_coords(dy=dy_array) + # Remove regions which have incorrect information for the high-resolution grid. # New regions will be generated when creating a new Dataset in # BoutDataset.getHighParallelResVars From 72865d0c0c82dd8e25dfb9760dd73f76d452ec02 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 19:10:17 +0000 Subject: [PATCH 20/94] Create new Dataset of high-parallel resolution variables Add method BoutDataset.getHighParallelResVars() that takes a list of variables, and returns a new BoutDataset containing those variables with an increased parallel resolution. The new Dataset is a fully valid BoutDataset, so all plotting methods, etc. work. 
--- xbout/boutdataset.py | 56 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 2a8d67f1..9a1c10fa 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -16,6 +16,7 @@ import numpy as np from dask.diagnostics import ProgressBar +from .geometries import apply_geometry from .plotting.animate import animate_poloidal, animate_pcolormesh, animate_line from .plotting.utils import _create_norm @@ -95,6 +96,61 @@ def resetParallelInterpFactor(self, n): return ds + def getHighParallelResVars(self, variables, **kwargs): + """ + Interpolate in the parallel direction to get a higher resolution version of one + or more variables. + + Parameters + ---------- + variables : str or sequence of str + The names of the variables to interpolate + n : int, optional + The factor to increase the resolution by. Defaults to the value set by + BoutDataset.setupParallelInterp(), or 10 if that has not been called. + toroidal_points : int or sequence of int, optional + If int, number of toroidal points to output, applies a stride to toroidal + direction to save memory usage. If sequence of int, the indexes of toroidal + points for the output. + method : str, optional + The interpolation method to use. Options from xarray.DataArray.interp(), + currently: linear, nearest, zero, slinear, quadratic, cubic. Default is + 'cubic'. + + Returns + ------- + A new Dataset containing a high-resolution versions of the variables. The new + Dataset is a valid BoutDataset, although containing only the specified variables. + """ + if isinstance(variables, str): + ds = self.data[variables].bout.highParallelRes(**kwargs) + else: + # Need to start with a Dataset with attrs as merge() drops the attrs of the + # passed-in argument. 
+ ds = self.data[variables[0]].bout.highParallelRes(**kwargs) + for var in variables[1:]: + ds.merge(self.data[var].bout.highParallelRes(**kwargs)) + + # Add extra variables needed to make this a valid Dataset + ds['dx'] = self.data['dx'].bout.highParallelRes(**kwargs)['dx'] + + # dy needs to be compatible with the new poloidal coordinate + # dy was created as a coordinate in BoutDataArray.highParallelResRegion, here just + # need to demote back to a regular variable. + ds = ds.reset_coords('dy') + + # Apply geometry + try: + ds = apply_geometry(ds, ds.geometry) + except AttributeError as e: + # if no geometry was originally applied, then ds has no geometry attribute and + # we can continue without applying geometry here + if str(e) != "'Dataset' object has no attribute 'geometry'": + raise + + return ds + + def save(self, savepath='./boutdata.nc', filetype='NETCDF4', variables=None, save_dtype=None, separate_vars=False, pre_load=False): """ From ac7e29aad46ef006148cc0aa229adeacf14dbec4 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 17 Mar 2020 19:16:35 +0000 Subject: [PATCH 21/94] Only select toroidal_points if variable has z-dimension --- xbout/boutdataarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 759e1314..7e8496ed 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -463,7 +463,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, # Want output in non-aligned coordinates da = da.bout.fromFieldAligned() - if toroidal_points is not None: + if toroidal_points is not None and zcoord in da.sizes: if isinstance(toroidal_points, int): nz = len(da[zcoord]) zstride = nz//toroidal_points From dbd28625b1111ec55e917310e5ad7a4d2672049c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 14:22:32 +0000 Subject: [PATCH 22/94] Test for _update_metadata_increased_resolution() --- xbout/tests/test_utils.py | 25 ++++++++++++++++++++++++- 1 
file changed, 24 insertions(+), 1 deletion(-) diff --git a/xbout/tests/test_utils.py b/xbout/tests/test_utils.py index c973368e..546e5c9d 100644 --- a/xbout/tests/test_utils.py +++ b/xbout/tests/test_utils.py @@ -2,7 +2,7 @@ from xarray import Dataset, DataArray -from xbout.utils import _set_attrs_on_all_vars +from xbout.utils import _set_attrs_on_all_vars, _update_metadata_increased_resolution class TestUtils: @@ -36,3 +36,26 @@ def test__set_attrs_on_all_vars_copy(self): assert ds.metadata['x'] == 5 assert ds['a'].metadata['x'] == 3 assert ds['b'].metadata['x'] == 3 + + def test__update_metadata_increased_resolution(self): + da = DataArray() + da.attrs['metadata'] = { + 'jyseps1_1': 1, + 'jyseps2_1': 2, + 'ny_inner': 3, + 'jyseps1_2': 4, + 'jyseps2_2': 5, + 'ny': 6, + 'MYSUB': 7, + } + + da = _update_metadata_increased_resolution(da, 3) + + assert da.metadata['jyseps1_1'] == 5 + assert da.metadata['jyseps2_1'] == 8 + assert da.metadata['jyseps1_2'] == 14 + assert da.metadata['jyseps2_2'] == 17 + + assert da.metadata['ny_inner'] == 9 + assert da.metadata['ny'] == 18 + assert da.metadata['MYSUB'] == 21 From 06fdb911218685ed17c4e4b3cf7cb56418f4e200 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 16:11:36 +0000 Subject: [PATCH 23/94] Test for BoutDataSet.resetParallelInterpFactor() --- xbout/tests/test_boutdataset.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index e031f102..ffe40b1f 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -8,6 +8,7 @@ from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout import BoutDatasetAccessor, open_boutdataset from xbout.geometries import apply_geometry +from xbout.utils import _set_attrs_on_all_vars EXAMPLE_OPTIONS_FILE_PATH = './xbout/tests/data/options/BOUT.inp' @@ -81,6 +82,21 @@ def test_getFieldAligned(self, tmpdir_factory, bout_xyt_example_files): 
ds['n_aligned'] = ds['T'] xrt.assert_allclose(ds.bout.getFieldAligned('n'), ds['T']) + def test_resetParallelInterpFactor(self): + ds = Dataset() + ds['a'] = DataArray() + ds = _set_attrs_on_all_vars(ds, 'metadata', {}) + + with pytest.raises(KeyError): + ds.metadata['fine_interpolation_factor'] + with pytest.raises(KeyError): + ds['a'].metadata['fine_interpolation_factor'] + + ds.bout.resetParallelInterpFactor(42) + + assert ds.metadata['fine_interpolation_factor'] == 42 + assert ds['a'].metadata['fine_interpolation_factor'] == 42 + class TestLoadInputFile: @pytest.mark.skip From 373c43b3ea94dce0666a86206e8ce2db95e3602f Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:13:50 +0000 Subject: [PATCH 24/94] import numpy.testing as npt instead of importing assert_allclose xarray.testing also provides an assert_allclose function, so it is clearer to be explicit about which module the function belongs to. --- xbout/tests/test_boutdataarray.py | 254 +++++++++++++++--------------- 1 file changed, 127 insertions(+), 127 deletions(-) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 95365dea..d3a21cc4 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -1,5 +1,5 @@ import numpy as np -from numpy.testing import assert_allclose +import numpy.testing as npt from xarray.core.utils import dict_equiv @@ -43,69 +43,69 @@ def test_toFieldAligned(self, tmpdir_factory, bout_xyt_example_files): n.attrs['direction_y'] = 'Standard' n_al = n.bout.toFieldAligned() for t in range(ds.sizes['t']): - assert_allclose(n_al[t, 0, 0, 0].values, 1000.*t + 0., rtol=1.e-15, atol=5.e-16) # noqa: E501 - assert_allclose(n_al[t, 0, 0, 1].values, 1000.*t + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 0, 2].values, 1000.*t + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 0, 3].values, 1000.*t + 3., rtol=1.e-15, atol=0.) 
# noqa: E501 - assert_allclose(n_al[t, 0, 0, 4].values, 1000.*t + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 0, 5].values, 1000.*t + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 0, 6].values, 1000.*t + 6., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 0, 1, 0].values, 1000.*t + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 1].values, 1000.*t + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 2].values, 1000.*t + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 3].values, 1000.*t + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 4].values, 1000.*t + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 5].values, 1000.*t + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 1, 6].values, 1000.*t + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 0, 2, 0].values, 1000.*t + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 1].values, 1000.*t + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 2].values, 1000.*t + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 3].values, 1000.*t + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 4].values, 1000.*t + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 5].values, 1000.*t + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 2, 6].values, 1000.*t + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 0, 3, 0].values, 1000.*t + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 3, 1].values, 1000.*t + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 3, 2].values, 1000.*t + 10.*3. + 5., rtol=1.e-15, atol=0.) 
# noqa: E501 - assert_allclose(n_al[t, 0, 3, 3].values, 1000.*t + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 3, 4].values, 1000.*t + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 3, 5].values, 1000.*t + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 0, 3, 6].values, 1000.*t + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 1, 0, 0].values, 1000.*t + 100.*1 + 10.*0. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 1].values, 1000.*t + 100.*1 + 10.*0. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 2].values, 1000.*t + 100.*1 + 10.*0. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 3].values, 1000.*t + 100.*1 + 10.*0. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 4].values, 1000.*t + 100.*1 + 10.*0. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 5].values, 1000.*t + 100.*1 + 10.*0. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 0, 6].values, 1000.*t + 100.*1 + 10.*0. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 1, 1, 0].values, 1000.*t + 100.*1 + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 1].values, 1000.*t + 100.*1 + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 2].values, 1000.*t + 100.*1 + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 3].values, 1000.*t + 100.*1 + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 4].values, 1000.*t + 100.*1 + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 5].values, 1000.*t + 100.*1 + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 1, 6].values, 1000.*t + 100.*1 + 10.*1. + 4., rtol=1.e-15, atol=0.) 
# noqa: E501 - - assert_allclose(n_al[t, 1, 2, 0].values, 1000.*t + 100.*1 + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 1].values, 1000.*t + 100.*1 + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 2].values, 1000.*t + 100.*1 + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 3].values, 1000.*t + 100.*1 + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 4].values, 1000.*t + 100.*1 + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 5].values, 1000.*t + 100.*1 + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 2, 6].values, 1000.*t + 100.*1 + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_al[t, 1, 3, 0].values, 1000.*t + 100.*1 + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 1].values, 1000.*t + 100.*1 + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 2].values, 1000.*t + 100.*1 + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 3].values, 1000.*t + 100.*1 + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 4].values, 1000.*t + 100.*1 + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_al[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 0].values, 1000.*t + 0., rtol=1.e-15, atol=5.e-16) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 1].values, 1000.*t + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 2].values, 1000.*t + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 3].values, 1000.*t + 3., rtol=1.e-15, atol=0.) 
# noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 4].values, 1000.*t + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 5].values, 1000.*t + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, 6].values, 1000.*t + 6., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 0, 1, 0].values, 1000.*t + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 1].values, 1000.*t + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 2].values, 1000.*t + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 3].values, 1000.*t + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 4].values, 1000.*t + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 5].values, 1000.*t + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, 6].values, 1000.*t + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 0, 2, 0].values, 1000.*t + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 1].values, 1000.*t + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 2].values, 1000.*t + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 3].values, 1000.*t + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 4].values, 1000.*t + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 5].values, 1000.*t + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, 6].values, 1000.*t + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 0, 3, 0].values, 1000.*t + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 1].values, 1000.*t + 10.*3. + 4., rtol=1.e-15, atol=0.) 
# noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 2].values, 1000.*t + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 3].values, 1000.*t + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 4].values, 1000.*t + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 5].values, 1000.*t + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, 6].values, 1000.*t + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 1, 0, 0].values, 1000.*t + 100.*1 + 10.*0. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 1].values, 1000.*t + 100.*1 + 10.*0. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 2].values, 1000.*t + 100.*1 + 10.*0. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 3].values, 1000.*t + 100.*1 + 10.*0. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 4].values, 1000.*t + 100.*1 + 10.*0. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 5].values, 1000.*t + 100.*1 + 10.*0. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, 6].values, 1000.*t + 100.*1 + 10.*0. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 1, 1, 0].values, 1000.*t + 100.*1 + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 1].values, 1000.*t + 100.*1 + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 2].values, 1000.*t + 100.*1 + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 3].values, 1000.*t + 100.*1 + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 4].values, 1000.*t + 100.*1 + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 5].values, 1000.*t + 100.*1 + 10.*1. 
+ 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, 6].values, 1000.*t + 100.*1 + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 1, 2, 0].values, 1000.*t + 100.*1 + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 1].values, 1000.*t + 100.*1 + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 2].values, 1000.*t + 100.*1 + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 3].values, 1000.*t + 100.*1 + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 4].values, 1000.*t + 100.*1 + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 5].values, 1000.*t + 100.*1 + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, 6].values, 1000.*t + 100.*1 + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_al[t, 1, 3, 0].values, 1000.*t + 100.*1 + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 1].values, 1000.*t + 100.*1 + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 2].values, 1000.*t + 100.*1 + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 3].values, 1000.*t + 100.*1 + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 4].values, 1000.*t + 100.*1 + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) 
# noqa: E501 def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, nxpe=1, nype=1, nt=1) @@ -130,66 +130,66 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files): n.attrs['direction_y'] = 'Aligned' n_nal = n.bout.fromFieldAligned() for t in range(ds.sizes['t']): - assert_allclose(n_nal[t, 0, 0, 0].values, 1000.*t + 0., rtol=1.e-15, atol=5.e-16) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 1].values, 1000.*t + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 2].values, 1000.*t + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 3].values, 1000.*t + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 4].values, 1000.*t + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 5].values, 1000.*t + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 0, 6].values, 1000.*t + 6., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 0, 1, 0].values, 1000.*t + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 1].values, 1000.*t + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 2].values, 1000.*t + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 3].values, 1000.*t + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 4].values, 1000.*t + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 5].values, 1000.*t + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 1, 6].values, 1000.*t + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 0, 2, 0].values, 1000.*t + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 1].values, 1000.*t + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 2].values, 1000.*t + 10.*2. 
+ 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 3].values, 1000.*t + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 4].values, 1000.*t + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 5].values, 1000.*t + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 2, 6].values, 1000.*t + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 0, 3, 0].values, 1000.*t + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 1].values, 1000.*t + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 2].values, 1000.*t + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 3].values, 1000.*t + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 4].values, 1000.*t + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 5].values, 1000.*t + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 0, 3, 6].values, 1000.*t + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 1, 0, 0].values, 1000.*t + 100.*1 + 10.*0. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 1].values, 1000.*t + 100.*1 + 10.*0. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 2].values, 1000.*t + 100.*1 + 10.*0. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 3].values, 1000.*t + 100.*1 + 10.*0. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 4].values, 1000.*t + 100.*1 + 10.*0. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 5].values, 1000.*t + 100.*1 + 10.*0. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 0, 6].values, 1000.*t + 100.*1 + 10.*0. + 2., rtol=1.e-15, atol=0.) 
# noqa: E501 - - assert_allclose(n_nal[t, 1, 1, 0].values, 1000.*t + 100.*1 + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 1].values, 1000.*t + 100.*1 + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 2].values, 1000.*t + 100.*1 + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 3].values, 1000.*t + 100.*1 + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 4].values, 1000.*t + 100.*1 + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 5].values, 1000.*t + 100.*1 + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 1, 6].values, 1000.*t + 100.*1 + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 1, 2, 0].values, 1000.*t + 100.*1 + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 1].values, 1000.*t + 100.*1 + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 2].values, 1000.*t + 100.*1 + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 3].values, 1000.*t + 100.*1 + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 4].values, 1000.*t + 100.*1 + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 5].values, 1000.*t + 100.*1 + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 2, 6].values, 1000.*t + 100.*1 + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - - assert_allclose(n_nal[t, 1, 3, 0].values, 1000.*t + 100.*1 + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 1].values, 1000.*t + 100.*1 + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 2].values, 1000.*t + 100.*1 + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 3].values, 1000.*t + 100.*1 + 10.*3. 
+ 3., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 4].values, 1000.*t + 100.*1 + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 - assert_allclose(n_nal[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 0].values, 1000.*t + 0., rtol=1.e-15, atol=5.e-16) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 1].values, 1000.*t + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 2].values, 1000.*t + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 3].values, 1000.*t + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 4].values, 1000.*t + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 5].values, 1000.*t + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 0, 6].values, 1000.*t + 6., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 0, 1, 0].values, 1000.*t + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 1].values, 1000.*t + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 2].values, 1000.*t + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 3].values, 1000.*t + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 4].values, 1000.*t + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 5].values, 1000.*t + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 1, 6].values, 1000.*t + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 0, 2, 0].values, 1000.*t + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 1].values, 1000.*t + 10.*2. 
+ 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 2].values, 1000.*t + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 3].values, 1000.*t + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 4].values, 1000.*t + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 5].values, 1000.*t + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 2, 6].values, 1000.*t + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 0, 3, 0].values, 1000.*t + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 1].values, 1000.*t + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 2].values, 1000.*t + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 3].values, 1000.*t + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 4].values, 1000.*t + 10.*3. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 5].values, 1000.*t + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 0, 3, 6].values, 1000.*t + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 1, 0, 0].values, 1000.*t + 100.*1 + 10.*0. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 1].values, 1000.*t + 100.*1 + 10.*0. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 2].values, 1000.*t + 100.*1 + 10.*0. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 3].values, 1000.*t + 100.*1 + 10.*0. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 4].values, 1000.*t + 100.*1 + 10.*0. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 5].values, 1000.*t + 100.*1 + 10.*0. + 1., rtol=1.e-15, atol=0.) 
# noqa: E501 + npt.assert_allclose(n_nal[t, 1, 0, 6].values, 1000.*t + 100.*1 + 10.*0. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 1, 1, 0].values, 1000.*t + 100.*1 + 10.*1. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 1].values, 1000.*t + 100.*1 + 10.*1. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 2].values, 1000.*t + 100.*1 + 10.*1. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 3].values, 1000.*t + 100.*1 + 10.*1. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 4].values, 1000.*t + 100.*1 + 10.*1. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 5].values, 1000.*t + 100.*1 + 10.*1. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 1, 6].values, 1000.*t + 100.*1 + 10.*1. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 1, 2, 0].values, 1000.*t + 100.*1 + 10.*2. + 1., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 1].values, 1000.*t + 100.*1 + 10.*2. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 2].values, 1000.*t + 100.*1 + 10.*2. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 3].values, 1000.*t + 100.*1 + 10.*2. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 4].values, 1000.*t + 100.*1 + 10.*2. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 5].values, 1000.*t + 100.*1 + 10.*2. + 6., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 2, 6].values, 1000.*t + 100.*1 + 10.*2. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + + npt.assert_allclose(n_nal[t, 1, 3, 0].values, 1000.*t + 100.*1 + 10.*3. + 0., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 1].values, 1000.*t + 100.*1 + 10.*3. + 1., rtol=1.e-15, atol=0.) 
# noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 2].values, 1000.*t + 100.*1 + 10.*3. + 2., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 3].values, 1000.*t + 100.*1 + 10.*3. + 3., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 4].values, 1000.*t + 100.*1 + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_nal[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) # noqa: E501 From 8d10dde0756144e6938e8ada14b2f11d8090b4e2 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:15:10 +0000 Subject: [PATCH 25/94] Tests for BoutDataArray.highParallelResRegion() --- xbout/tests/test_boutdataarray.py | 70 +++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index d3a21cc4..d84eefa0 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -1,6 +1,8 @@ import numpy as np import numpy.testing as npt +from pathlib import Path +import xarray as xr from xarray.core.utils import dict_equiv from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds @@ -193,3 +195,71 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files): npt.assert_allclose(n_nal[t, 1, 3, 4].values, 1000.*t + 100.*1 + 10.*3. + 4., rtol=1.e-15, atol=0.) # noqa: E501 npt.assert_allclose(n_nal[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 npt.assert_allclose(n_nal[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) 
# noqa: E501 + + def test_highParallelResRegion_core(self, tmpdir_factory, bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + nype=1, nt=1, grid='grid', guards={'y':2}, + topology='core') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/16. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 20), + dims='theta') + + dtheta_fine = thetalength/128. + theta_fine = xr.DataArray( + np.linspace(0. + dtheta_fine/2., thetalength - dtheta_fine/2., 128), + dims='theta') + + def f(t): + t = np.sin(t) + return (t**3 - t**2 + t - 1.) + + n.data = f(theta).broadcast_like(n) + + n_highres = n.bout.highParallelResRegion('core') + + expected = f(theta_fine).broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + + def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + nype=1, nt=1, grid='grid', guards={'y':2}, + topology='sol') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/16. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 20), + dims='theta') + + dtheta_fine = thetalength/128. + theta_fine = xr.DataArray( + np.linspace(0. - 1.5*dtheta_fine, thetalength + 1.5*dtheta_fine, 132), + dims='theta') + + def f(t): + t = np.sin(t) + return (t**3 - t**2 + t - 1.) 
+ + n.data = f(theta).broadcast_like(n) + + n_highres = n.bout.highParallelResRegion('SOL') + + expected = f(theta_fine).broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) From 5ac595fdea5a846d84b9463bc001754b9caf004e Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:15:38 +0000 Subject: [PATCH 26/94] Drop attrs that do not belong in Dataset in BoutDataArray.to_dataset() Attributes like 'direction_y' only make sense for a particular DataArray, not the whole Dataset. --- xbout/boutdataarray.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 7e8496ed..f26f0319 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -48,12 +48,18 @@ def __str__(self): def to_dataset(self): """ Convert a DataArray to a Dataset, copying the attributes from the DataArray to - the Dataset. + the Dataset, and dropping attributes that only make sense for a DataArray """ da = self.data ds = da.to_dataset() - ds.attrs = da.attrs + ds.attrs = deepcopy(da.attrs) + + def dropIfExists(ds, name): + if name in ds.attrs: + del ds.attrs[name] + dropIfExists(ds, 'direction_y') + dropIfExists(ds, 'direction_z') return ds From d8f8d548c425f334cd23a5c7efa3964f52c9158e Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:17:02 +0000 Subject: [PATCH 27/94] Define global 1d coordinates in apply_geometry() Some coordinates corresponding to x (calculated from the index), y (calculated from dy) and z (calculated from ZMIN and ZMAX) can always be created, although they might be named differently. So create them in the top-level apply_geometry() function, not the registered functions for particular geometries. 
--- xbout/geometries.py | 69 +++++++++++++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 25 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index fb54b0f2..4b9dc940 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -68,6 +68,50 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): else: updated_ds = add_geometry_coords(ds) + # Add global 1D coordinates + # ###################### + # Note the global coordinates used here are defined so that they are zero at + # the boundaries of the grid (where the grid includes all boundary cells), not + # necessarily the physical boundaries, because constant offsets do not matter, as long + # as these bounds are consistent with the global coordinates defined in + # Region.__init__() (we will only use these coordinates for interpolation) and it is + # simplest to calculate them with cumsum(). + xcoord = updated_ds.metadata.get('bout_xdim', 'x') + ycoord = updated_ds.metadata.get('bout_ydim', 'y') + zcoord = updated_ds.metadata.get('bout_zdim', 'z') + if xcoord not in ds.coords: + # Make index 'x' a coordinate, useful for handling global indexing + # Note we have to use the index value, not the value calculated from 'dx' because + # 'dx' may not be consistent between different regions (e.g. core and PFR). + # For some geometries xcoord may have already been created by add_geometry_coords, + # in which case we do not need this. + nx = updated_ds.dims[xcoord] + updated_ds = updated_ds.assign_coords(**{xcoord: np.arange(nx)}) + ny = updated_ds.dims[ycoord] + # dy should always be constant in x, so it is safe to slice to x=0. + # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid + # (similarly x-coordinate is 1d coordinate that labels y-z slices and + # z-coordinate is a 1d coordinate that labels x-y slices). 
A coordinate might + # have different values in disconnected regions, but there are no branch-cuts + # allowed in the x-direction in BOUT++ (at least for the momement), so the + # y-coordinate has to be 1d and single-valued. Therefore similarly dy has to be + # 1d and single-valued.] Need drop=True so that the result does not have an + # x-coordinate value which prevents it being added as a coordinate. + dy = updated_ds['dy'].isel({xcoord: 0}, drop=True) + + # calculate ycoord at the centre of each cell + y = dy.cumsum(keep_attrs=True) - dy/2. + updated_ds = updated_ds.assign_coords(**{ycoord: y.values}) + + # If full data (not just grid file) then toroidal dim will be present + if zcoord in updated_ds.dims: + nz = updated_ds.dims[zcoord] + z = xr.DataArray( + np.linspace(start=updated_ds.metadata['ZMIN'], + stop=2 * np.pi * updated_ds.metadata['ZMAX'], num=nz), + dims=zcoord) + updated_ds = updated_ds.assign_coords(**{zcoord: z}) + return updated_ds @@ -155,26 +199,6 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # Change names of dimensions to Orthogonal Toroidal ones ds = ds.rename(y=coordinates['y']) - # Add 1D Orthogonal Toroidal coordinates - # Make index 'x' a coordinate, useful for handling global indexing - nx = ds.dims['x'] - ds = ds.assign_coords(x=np.arange(nx)) - ny = ds.dims[coordinates['y']] - # dy should always be constant in x, so it is safe to slice to x=0. - # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid - # (similarly x-coordinate is 1d coordinate that labels y-z slices and - # z-coordinate is a 1d coordinate that labels x-y slices). A coordinate might - # have different values in disconnected regions, but there are no branch-cuts - # allowed in the x-direction in BOUT++ (at least for the momement), so the - # y-coordinate has to be 1d and single-valued. Therefore similarly dy has to be - # 1d and single-valued.] 
Need drop=True so that the result does not have an - # x-coordinate value which prevents it being added as a coordinate. - dy = ds['dy'].isel(x=0, drop=True) - - # calculate theta at the centre of each cell - theta = dy.cumsum(keep_attrs=True) - dy/2. - ds = ds.assign_coords(**{coordinates['y']: theta}) - # TODO automatically make this coordinate 1D in simplified cases? ds = ds.rename(psixy=coordinates['x']) ds = ds.set_coords(coordinates['x']) @@ -189,11 +213,6 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # If full data (not just grid file) then toroidal dim will be present if 'z' in ds.dims: ds = ds.rename(z=coordinates['z']) - nz = ds.dims[coordinates['z']] - phi = xr.DataArray(np.linspace(start=ds.metadata['ZMIN'], - stop=2 * np.pi * ds.metadata['ZMAX'], num=nz), - dims=coordinates['z']) - ds = ds.assign_coords(**{coordinates['z']: phi}) # Record which dimension 'z' was renamed to. ds.metadata['bout_zdim'] = coordinates['z'] From ba2b5b4c10ea225c0ba2b148a550a67e3bb9c371 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:26:46 +0000 Subject: [PATCH 28/94] Fix the coordinate calculations in Region.__init__() Previously were off by half a grid-cell. 
--- xbout/region.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/xbout/region.py b/xbout/region.py index 873ee664..79ef71e9 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -59,18 +59,30 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, xcoord = ds.metadata['bout_xdim'] ycoord = ds.metadata['bout_ydim'] + # Note the global coordinates used here are defined so that they are zero at + # the boundaries of the grid (where the grid includes all boundary cells), not + # necessarily the physical boundaries because constant offsets do not matter, + # as long as these bounds are consistent with the global coordinates defined + # in apply_geometry (we will only use these coordinates for interpolation) and + # it is simplest to calculate them with cumsum(). + # dx is constant in any particular region in the y-direction, so convert to a # 1d array + # Note that this is not the same coordinate as the 'x' coordinate that is + # created by default from the x-index, as these values are set only for + # particular regions, so do not need to be consistent between different + # regions (e.g. core and PFR), so we are not forced to use just the index + # value here. dx = ds['dx'].isel(**{ycoord: self.ylower_ind}) dx_cumsum = dx.cumsum() - self.xinner = dx_cumsum[xinner_ind] - dx[xinner_ind]/2. - self.xouter = dx_cumsum[xouter_ind - 1] + dx[xouter_ind - 1]/2. + self.xinner = dx_cumsum[xinner_ind] - dx[xinner_ind] + self.xouter = dx_cumsum[xouter_ind - 1] + dx[xouter_ind - 1] # dy is constant in the x-direction, so convert to a 1d array dy = ds['dy'].isel(**{xcoord: self.xinner_ind}) dy_cumsum = dy.cumsum() - self.ylower = dy_cumsum[ylower_ind] - dy[ylower_ind]/2. - self.yupper = dy_cumsum[yupper_ind - 1] + dy[yupper_ind - 1]/2. 
+ self.ylower = dy_cumsum[ylower_ind] - dy[ylower_ind] + self.yupper = dy_cumsum[yupper_ind- 1] def __repr__(self): result = "\n" From c8043965ada0d8f7ce2fc5f5a9219c70f2c86e8d Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 19:27:25 +0000 Subject: [PATCH 29/94] Add 'direction_y' attrs to test data Needed to pass checks in toFieldAligned(). --- xbout/tests/test_load.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 5a263d44..6dc34cbd 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -320,6 +320,8 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, T = DataArray(data, dims=['t', 'x', 'y', 'z']) n = DataArray(data, dims=['t', 'x', 'y', 'z']) + for v in [n, T]: + v.attrs['direction_y'] = 'Standard' ds = Dataset({'n': n, 'T': T}) # BOUT_VERSION needed so that we know that number of points in z is MZ, not MZ-1 (as From 05138dffe885bca5379aede0d7a23eb3f62b13d0 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 21:40:37 +0000 Subject: [PATCH 30/94] Fix region coordinate limits at boundaries For interpolation, where there is a physical boundary, want the limit of the coordinate (that is stored in the region) to be the global coordinate value at the boundary, not at the grid edge (which was what was stored previously). --- xbout/region.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/xbout/region.py b/xbout/region.py index 79ef71e9..3604ed78 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -54,6 +54,38 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, self.connection_upper = connect_upper if ds is not None: + # self.nx, self.ny should not include boundary points. 
+ # self.xinner, self.xouter, self.ylower, self.yupper + if ds.metadata['keep_xboundaries']: + xbndry = ds.metadata['MXG'] + if self.connection_inner is None: + self.nx -= xbndry + + # used to calculate x-coordinate of inner side (self.xinner) + xinner_ind += xbndry + + if self.connection_outer is None: + self.nx -= xbndry + + # used to calculate x-coordinate of outer side (self.xouter) + xouter_ind -= xbndry + + if ds.metadata['keep_yboundaries']: + ybndry = ds.metadata['MYG'] + if self.connection_lower is None: + self.ny -= ybndry + print('check ny 2', self.ny, self.connection_lower, connect_lower) + + # used to calculate y-coordinate of lower side (self.ylower) + ylower_ind += ybndry + + if self.connection_upper is None: + self.ny -= ybndry + print('check ny 3', self.ny, self.connection_upper, connect_upper) + + # used to calculate y-coordinate of upper side (self.yupper) + yupper_ind -= ybndry + # calculate start and end coordinates ##################################### xcoord = ds.metadata['bout_xdim'] From a1802489e77e549dbefe309b88d701f17db06b91 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 21:48:01 +0000 Subject: [PATCH 31/94] Set up connections in Region constructor Need region connections in the constructor, to adjust coordinate limits if there is a boundary. 
--- xbout/region.py | 248 ++++++++++++++++++++++++++---------------------- 1 file changed, 135 insertions(+), 113 deletions(-) diff --git a/xbout/region.py b/xbout/region.py index 3604ed78..3df358d5 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -284,14 +284,36 @@ def _get_topology(ds): return 'disconnected-double-null' -def _create_connection_x(regions, inner, outer): - regions[inner].connection_outer = outer - regions[outer].connection_inner = inner - - -def _create_connection_y(regions, lower, upper): - regions[lower].connection_upper = upper - regions[upper].connection_lower = lower +def _check_connections(regions): + for region in regions.values(): + if region.connection_inner is not None: + if regions[region.connection_inner].connection_outer != region.name: + raise ValueError( + 'Inner connection of ' + region.name + ' is ' + + region.connection_inner + ', but outer connection of ' + + region.connection_inner + ' is ' + + regions[region.connection_inner].connection_outer) + if region.connection_outer is not None: + if regions[region.connection_outer].connection_inner != region.name: + raise ValueError( + 'Inner connection of ' + region.name + ' is ' + + region.connection_outer + ', but inner connection of ' + + region.connection_outer + ' is ' + + regions[region.connection_outer].connection_inner) + if region.connection_lower is not None: + if regions[region.connection_lower].connection_upper != region.name: + raise ValueError( + 'Inner connection of ' + region.name + ' is ' + + region.connection_lower + ', but upper connection of ' + + region.connection_lower + ' is ' + + regions[region.connection_lower].connection_upper) + if region.connection_upper is not None: + if regions[region.connection_upper].connection_lower != region.name: + raise ValueError( + 'Inner connection of ' + region.name + ' is ' + + region.connection_upper + ', but lower connection of ' + + region.connection_upper + ' is ' + + regions[region.connection_upper].connection_lower) 
def _create_regions_toroidal(ds): @@ -352,210 +374,210 @@ def _create_regions_toroidal(ds): if topology == 'disconnected-double-null': regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_intersep', + connect_upper='lower_outer_PFR') regions['lower_inner_intersep'] = Region( name='lower_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_outer='lower_inner_SOL', connect_upper='inner_intersep') regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_intersep', + connect_upper='inner_SOL') regions['inner_core'] = Region( name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1) + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, + connect_outer='inner_intersep', connect_lower='outer_core', + connect_upper='outer_core') regions['inner_intersep'] = Region( name='inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1) + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', + connect_outer='inner_SOL', connect_lower='lower_inner_intersep', + connect_upper='outer_intersep') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1) + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, + connect_inner='inner_intersep', connect_lower='lower_inner_SOL', + connect_upper='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys21 + 1, yupper_ind=nyinner) + ylower_ind=jys21 + 1, yupper_ind=nyinner, + 
connect_outer='upper_inner_intersep', connect_lower='upper_outer_PFR') regions['upper_inner_intersep'] = Region( name='upper_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys21 + 1, yupper_ind=nyinner) + ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', + connect_outer='upper_inner_SOL', connect_lower='upper_outer_intersep') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys21 + 1, yupper_ind=nyinner) + ylower_ind=jys21 + 1, yupper_ind=nyinner, + connect_inner='upper_inner_intersep', connect_lower='inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys12 + 1) + ylower_ind=nyinner, yupper_ind=jys12 + 1, + connect_outer='upper_outer_intersep', connect_upper='upper_inner_PFR') regions['upper_outer_intersep'] = Region( name='upper_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=nyinner, yupper_ind=jys12 + 1) + ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_inner='upper_outer_PFR', + connect_outer='upper_outer_SOL', connect_upper='upper_inner_intersep') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys12 + 1) + ylower_ind=nyinner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_intersep', connect_upper='outer_SOL') regions['outer_core'] = Region( name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1) + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, + connect_outer='outer_intersep', connect_lower='inner_core', + connect_upper='inner_core') regions['outer_intersep'] = Region( name='outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1) + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', + connect_outer='outer_SOL', 
connect_lower='inner_intersep', + connect_upper='lower_outer_intersep') regions['outer_SOL'] = Region( name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1) + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, + connect_inner='outer_intersep', connect_lower='upper_outer_SOL', + connect_upper='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny) + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_intersep', + connect_lower='lower_inner_PFR') regions['lower_outer_intersep'] = Region( name='lower_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys22 + 1, yupper_ind=ny) + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_outer='lower_outer_SOL', connect_lower='outer_intersep') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny) - _create_connection_x(regions, 'lower_inner_PFR', 'lower_inner_intersep') - _create_connection_x(regions, 'lower_inner_intersep', 'lower_inner_SOL') - _create_connection_x(regions, 'inner_core', 'inner_intersep') - _create_connection_x(regions, 'inner_intersep', 'inner_SOL') - _create_connection_x(regions, 'upper_inner_PFR', 'upper_inner_intersep') - _create_connection_x(regions, 'upper_inner_intersep', 'upper_inner_SOL') - _create_connection_x(regions, 'upper_outer_PFR', 'upper_outer_intersep') - _create_connection_x(regions, 'upper_outer_intersep', 'upper_outer_SOL') - _create_connection_x(regions, 'outer_core', 'outer_intersep') - _create_connection_x(regions, 'outer_intersep', 'outer_SOL') - _create_connection_x(regions, 'lower_outer_PFR', 'lower_outer_intersep') - _create_connection_x(regions, 'lower_outer_intersep', 'lower_outer_SOL') - _create_connection_y(regions, 'lower_inner_PFR', 'lower_outer_PFR') - _create_connection_y(regions, 
'lower_inner_intersep', 'inner_intersep') - _create_connection_y(regions, 'lower_inner_SOL', 'inner_SOL') - _create_connection_y(regions, 'inner_core', 'outer_core') - _create_connection_y(regions, 'outer_core', 'inner_core') - _create_connection_y(regions, 'inner_intersep', 'outer_intersep') - _create_connection_y(regions, 'inner_SOL', 'upper_inner_SOL') - _create_connection_y(regions, 'upper_outer_intersep', 'upper_inner_intersep') - _create_connection_y(regions, 'upper_outer_PFR', 'upper_inner_PFR') - _create_connection_y(regions, 'upper_outer_SOL', 'outer_SOL') - _create_connection_y(regions, 'outer_intersep', 'lower_outer_intersep') - _create_connection_y(regions, 'outer_SOL', 'lower_outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_intersep', + connect_lower='outer_SOL') + _check_connections(regions) elif topology == 'connected-double-null': regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', + connect_upper='lower_outer_PFR') regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_upper='inner_SOL') regions['inner_core'] = Region( name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1) + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_outer='inner_SOL', + connect_lower='outer_core', connect_upper='outer_core') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1) + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', + connect_lower='lower_inner_SOL', connect_upper='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, 
xouter_ind=ixs1, - ylower_ind=jys21 + 1, yupper_ind=nyinner) + ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_outer='upper_inner_SOL', + connect_lower='upper_outer_PFR') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys21 + 1, yupper_ind=nyinner) + ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', + connect_lower='inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys12 + 1) + ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_outer='upper_outer_SOL', + connect_upper='upper_inner_PFR') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys12 + 1) + ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_inner='upper_outer_PFR', + connect_upper='outer_SOL') regions['outer_core'] = Region( name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1) + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_outer='outer_SOL', + connect_lower='inner_core', connect_upper='inner_core') regions['outer_SOL'] = Region( name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1) + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', + connect_lower='upper_outer_SOL', connect_upper='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny) + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', + connect_lower='lower_inner_PFR') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny) - _create_connection_x(regions, 'lower_inner_PFR', 'lower_inner_SOL') - _create_connection_x(regions, 'inner_core', 'inner_SOL') - 
_create_connection_x(regions, 'upper_inner_PFR', 'upper_inner_SOL') - _create_connection_x(regions, 'upper_outer_PFR', 'upper_outer_SOL') - _create_connection_x(regions, 'outer_core', 'outer_SOL') - _create_connection_x(regions, 'lower_outer_PFR', 'lower_outer_SOL') - _create_connection_y(regions, 'lower_inner_PFR', 'lower_outer_PFR') - _create_connection_y(regions, 'lower_inner_SOL', 'inner_SOL') - _create_connection_y(regions, 'inner_core', 'outer_core') - _create_connection_y(regions, 'outer_core', 'inner_core') - _create_connection_y(regions, 'inner_SOL', 'upper_inner_SOL') - _create_connection_y(regions, 'upper_outer_PFR', 'upper_inner_PFR') - _create_connection_y(regions, 'upper_outer_SOL', 'outer_SOL') - _create_connection_y(regions, 'outer_SOL', 'lower_outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_lower='outer_SOL') + _check_connections(regions) elif topology == 'single-null': regions['inner_PFR'] = Region( name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, - yupper_ind=jys11 + 1) + yupper_ind=jys11 + 1, connect_outer='inner_SOL', connect_upper='outer_PFR') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=jys11 + 1) + yupper_ind=jys11 + 1, connect_inner='inner_PFR', connect_upper='SOL') regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1) + yupper_ind=jys22 + 1, connect_outer='SOL', connect_lower='core', + connect_upper='core') regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1) + yupper_ind=jys22 + 1, connect_inner='core', connect_lower='inner_SOL', + connect_upper='outer_SOL') regions['outer_PFR'] = Region( - name='lower_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny) + name='outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys22 + 1, 
yupper_ind=ny, connect_outer='outer_SOL', + connect_lower='inner_PFR') regions['outer_SOL'] = Region( - name='lower_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny) - _create_connection_x(regions, 'inner_PFR', 'inner_SOL') - _create_connection_x(regions, 'core', 'SOL') - _create_connection_x(regions, 'outer_PFR', 'outer_SOL') - _create_connection_y(regions, 'inner_PFR', 'outer_PFR') - _create_connection_y(regions, 'inner_SOL', 'SOL') - _create_connection_y(regions, 'core', 'core') - _create_connection_y(regions, 'SOL', 'outer_SOL') + name='outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='outer_PFR', + connect_lower='SOL') + _check_connections(regions) elif topology == 'limiter': regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ybndry, - yupper_ind=ny - ybndry) + yupper_ind=ny - ybndry, connect_outer='SOL', connect_lower='core', + connect_upper='core') regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=ny) - _create_connection_x(regions, 'core', 'SOL') - _create_connection_y(regions, 'core', 'core') + yupper_ind=ny, connect_inner='core') + _check_connections(regions) elif topology == 'core': regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=ybndry, - yupper_ind=ny - ybndry) - _create_connection_y(regions, 'core', 'core') + yupper_ind=ny - ybndry, connect_lower='core', connect_upper='core') + _check_connections(regions) elif topology == 'sol': regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=0, yupper_ind=ny) + _check_connections(regions) elif topology == 'xpoint': regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', + connect_upper='lower_outer_PFR') 
regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1) + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_upper='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=nyinner) + ylower_ind=jys11 + 1, yupper_ind=nyinner, connect_outer='upper_inner_SOL', + connect_lower='upper_outer_PFR') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=nyinner) + ylower_ind=jys11 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', + connect_lower='lower_inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys22 + 1) + ylower_ind=nyinner, yupper_ind=jys22 + 1, connect_outer='upper_outer_SOL', + connect_upper='upper_inner_PFR') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys22 + 1) + ylower_ind=nyinner, yupper_ind=jys22 + 1, connect_inner='upper_outer_PFR', + connect_upper='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny) + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', + connect_lower='lower_inner_PFR') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny) - _create_connection_x(regions, 'lower_inner_PFR', 'lower_inner_SOL') - _create_connection_x(regions, 'upper_inner_PFR', 'upper_inner_SOL') - _create_connection_x(regions, 'upper_outer_PFR', 'upper_outer_SOL') - _create_connection_x(regions, 'lower_outer_PFR', 'lower_outer_SOL') - _create_connection_y(regions, 'lower_inner_PFR', 'lower_outer_PFR') 
- _create_connection_y(regions, 'lower_inner_SOL', 'upper_inner_SOL') - _create_connection_y(regions, 'upper_outer_PFR', 'upper_inner_PFR') - _create_connection_y(regions, 'upper_outer_SOL', 'lower_outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_lower='upper_outer_SOL') + _check_connections(regions) else: raise NotImplementedError("Topology '" + topology + "' is not implemented") From c155d70116916c657a7810d648913fd602a8a324 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 21:51:44 +0000 Subject: [PATCH 32/94] Don't drop 'x' when creating 'r' for s-alpha geometry 'x' coordinate is now not created until after add_*_geometry_coords is called, so don't need to drop 'x' before adding 'r'. --- xbout/geometries.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index 4b9dc940..d7053032 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -264,10 +264,9 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): "geometry='s-alpha'") ds['r'] = ds['hthe'].isel({ycoord: 0}).squeeze(drop=True) ds['r'].attrs['units'] = 'm' - # remove x-index coordinate, don't need when we have 'r' as a radial coordinate - ds = ds.drop('x') ds = ds.set_coords('r') ds = ds.rename(x='r') + ds.metadata['bout_xdim'] = 'r' if hthe_from_grid: # remove hthe because it does not have correct metadata From 9fdec4139bcc55f49e04b85d034d9efff86dc3bf Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 20 Mar 2020 21:58:46 +0000 Subject: [PATCH 33/94] Include 'dy' in all inputs for tests 'dy' now required for open_boutdataset --- xbout/tests/test_geometries.py | 4 ++++ xbout/tests/test_grid.py | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/xbout/tests/test_geometries.py b/xbout/tests/test_geometries.py index 61ebc5b6..9ab6038e 100644 --- a/xbout/tests/test_geometries.py +++ b/xbout/tests/test_geometries.py @@ -1,3 +1,5 @@ +import numpy as 
np + from xarray import Dataset, DataArray from xarray.testing import assert_equal import pytest @@ -21,6 +23,8 @@ def add_schwarzschild_coords(ds, coordinates=None): assert "Schwarzschild" in REGISTERED_GEOMETRIES.keys() original = Dataset() + original['dy'] = DataArray(np.ones((3,4)), dims=('x', 'y')) + original.attrs['metadata'] = {} updated = apply_geometry(ds=original, geometry_name="Schwarzschild") assert_equal(updated['event_horizon'], DataArray(4.0)) diff --git a/xbout/tests/test_grid.py b/xbout/tests/test_grid.py index a1e079f3..175d5951 100644 --- a/xbout/tests/test_grid.py +++ b/xbout/tests/test_grid.py @@ -20,7 +20,8 @@ def create_example_grid_file(tmpdir_factory): # Create grid dataset arr = np.arange(6).reshape(2, 3) - grid = DataArray(data=arr, dims=['x', 'y']) + grid = DataArray(data=arr, name='arr', dims=['x', 'y']).to_dataset() + grid['dy'] = DataArray(np.ones((2, 3)), dims=['x', 'y']) # Create temporary directory save_dir = tmpdir_factory.mktemp("griddata") From e66e4418c04fa91172fad690d21ca87ab9e7c786 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 00:02:01 +0000 Subject: [PATCH 34/94] Test for BoutDataArray.highParallelResRegion() in single-null topology --- xbout/tests/test_boutdataarray.py | 52 +++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index d84eefa0..cfc477ec 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -263,3 +263,55 @@ def f(t): expected = f(theta_fine).broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + + def test_highParallelResRegion_singlenull(self, tmpdir_factory, + bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + nype=3, nt=1, grid='grid', guards={'y':2}, + topology='single-null') + + ds = open_boutdataset(datapath=path, + 
gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/48. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 52), + dims='theta') + + dtheta_fine = thetalength/3./128. + theta_fine = xr.DataArray( + np.linspace(0. + 0.5*dtheta_fine, thetalength - 0.5*dtheta_fine, 3*128), + dims='theta') + + def f(t): + t = np.sin(3.*t) + return (t**3 - t**2 + t - 1.) + + n.data = f(theta).broadcast_like(n) + + f_fine = f(theta_fine)[:128] + + for region in ['inner_PFR', 'inner_SOL']: + n_highres = n.bout.highParallelResRegion(region).isel(theta=slice(2, None)) + + expected = f_fine.broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + + for region in ['core', 'SOL']: + n_highres = n.bout.highParallelResRegion(region) + + expected = f_fine.broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + + for region in ['outer_PFR', 'outer_SOL']: + n_highres = n.bout.highParallelResRegion(region).isel(theta=slice( -2)) + + expected = f_fine.broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) From e736fe3aff85cf5c9655429e9b68fced4def3c97 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 00:18:09 +0000 Subject: [PATCH 35/94] Test for highParallelResRegion with different enhancement factors --- xbout/tests/test_boutdataarray.py | 39 +++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index cfc477ec..1cfc2da4 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -1,3 +1,5 @@ +import pytest + import numpy as np import numpy.testing as npt from pathlib import Path @@ -230,6 +232,43 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., 
atol=1.e-2) + @pytest.mark.parametrize('res_factor', [2, 3, 7, 18]) + def test_highParallelResRegion_core_change_n(self, tmpdir_factory, + bout_xyt_example_files, res_factor): + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + nype=1, nt=1, grid='grid', guards={'y':2}, + topology='core') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/16. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 20), + dims='theta') + + dtheta_fine = thetalength/res_factor/16. + theta_fine = xr.DataArray( + np.linspace(0. + dtheta_fine/2., thetalength - dtheta_fine/2., + res_factor*16), + dims='theta') + + def f(t): + t = np.sin(t) + return (t**3 - t**2 + t - 1.) + + n.data = f(theta).broadcast_like(n) + + n_highres = n.bout.highParallelResRegion('core', n=res_factor) + + expected = f(theta_fine).broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, From 05fd4cda022a0f7040da8a95d00e5beb063f58f4 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 00:48:45 +0000 Subject: [PATCH 36/94] Test for BoutDataArray.highParallelRes() --- xbout/tests/test_boutdataarray.py | 40 +++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 1cfc2da4..a29d5070 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -354,3 +354,43 @@ def f(t): expected = f_fine.broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + + def test_highParallelRes(self, 
tmpdir_factory, bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + nype=3, nt=1, grid='grid', guards={'y':2}, + topology='single-null') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/48. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 52), + dims='theta') + + dtheta_fine = thetalength/3./128. + theta_fine = xr.DataArray( + np.linspace(0. + 0.5*dtheta_fine, thetalength - 0.5*dtheta_fine, 3*128), + dims='theta') + x = xr.DataArray(np.arange(3), dims='x') + + def f_y(t): + t = np.sin(3.*t) + return (t**3 - t**2 + t - 1.) + + f = f_y(theta) * (x + 1.) + + n.data = f.broadcast_like(n) + + f_fine = f_y(theta_fine)*(x + 1.) + + n_highres_ds = n.bout.highParallelRes().isel(theta=slice(2, -2)) + + expected = f_fine.broadcast_like(n_highres_ds['n']) + + npt.assert_allclose(n_highres_ds['n'].values, expected.values, + rtol=0., atol=1.1e-2) From 456d2104fdcd8dafe439bc337925787f85388d66 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 00:49:27 +0000 Subject: [PATCH 37/94] Test for BoutDataset.getHghParallelResVars() --- xbout/tests/test_boutdataset.py | 332 ++++++++++++++++++++++++++++++++ xbout/tests/test_region.py | 16 +- 2 files changed, 341 insertions(+), 7 deletions(-) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index ffe40b1f..bbcf1c8a 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -1,11 +1,14 @@ import pytest +import numpy.testing as npt from xarray import Dataset, DataArray, concat, open_dataset, open_mfdataset import xarray.testing as xrt import numpy as np from pathlib import Path from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds +from xbout.tests.test_region import (params_guards, params_guards_values, + 
params_boundaries, params_boundaries_values) from xbout import BoutDatasetAccessor, open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _set_attrs_on_all_vars @@ -97,6 +100,335 @@ def test_resetParallelInterpFactor(self): assert ds.metadata['fine_interpolation_factor'] == 42 assert ds['a'].metadata['fine_interpolation_factor'] == 42 + @pytest.mark.parametrize(params_guards, params_guards_values) + @pytest.mark.parametrize(params_boundaries, params_boundaries_values) + def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, + guards, keep_xboundaries, keep_yboundaries): + # This test checks that the regions created in the new high-resolution Dataset by + # getHighParallelResVars are correct. + # This test does not test the accuracy of the parallel interpolation (there are + # other tests for that). + + # Note using more than MXG x-direction points and MYG y-direction points per + # output file ensures tests for whether boundary cells are present do not fail + # when using minimal numbers of processors + path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 4, 7), nxpe=3, + nype=6, nt=1, guards=guards, grid='grid', + topology='disconnected-double-null') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_xboundaries=keep_xboundaries, + keep_yboundaries=keep_yboundaries) + + # Get high parallel resolution version of ds, and check that + ds = ds.bout.getHighParallelResVars(('n', 'T')) + + mxg = guards['x'] + myg = guards['y'] + + if keep_xboundaries: + ixs1 = ds.metadata['ixseps1'] + else: + ixs1 = ds.metadata['ixseps1'] - guards['x'] + + if keep_xboundaries: + ixs2 = ds.metadata['ixseps2'] + else: + ixs2 = ds.metadata['ixseps2'] - guards['x'] + + if keep_yboundaries: + ybndry = guards['y'] + else: + ybndry = 0 + jys11 = ds.metadata['jyseps1_1'] + ybndry + jys21 = ds.metadata['jyseps2_1'] + ybndry + ny_inner = ds.metadata['ny_inner'] + 
2*ybndry + jys12 = ds.metadata['jyseps1_2'] + 3*ybndry + jys22 = ds.metadata['jyseps2_2'] + 3*ybndry + ny = ds.metadata['ny'] + 4*ybndry + + for var in ['n', 'T']: + v = ds[var] + + v_lower_inner_PFR = v.bout.fromRegion('lower_inner_PFR') + + # Remove attributes that are expected to be different + del v_lower_inner_PFR.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), theta=slice(jys11 + 1)), + v_lower_inner_PFR.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys22 + 1, jys22 + 1 + myg)).values, + v_lower_inner_PFR.isel(theta=slice(-myg, None)).values) + + v_lower_inner_intersep = v.bout.fromRegion('lower_inner_intersep') + + # Remove attributes that are expected to be different + del v_lower_inner_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys11 + 1)), + v_lower_inner_intersep.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys11 + 1, jys11 + 1 + myg)).values, + v_lower_inner_intersep.isel(theta=slice(-myg, None)).values) + + v_lower_inner_SOL = v.bout.fromRegion('lower_inner_SOL') + + # Remove attributes that are expected to be different + del v_lower_inner_SOL.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1)), + v_lower_inner_SOL.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys11 + 1, jys11 + 1 + myg)).values, + 
v_lower_inner_SOL.isel(theta=slice(-myg, None)).values) + + v_inner_core = v.bout.fromRegion('inner_core') + + # Remove attributes that are expected to be different + del v_inner_core.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys11 + 1, jys21 + 1)), + v_inner_core.isel( + theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys22 + 1 - myg, jys22 + 1)).values, + v_inner_core.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys12 + 1, jys12 + 1 + myg)).values, + v_inner_core.isel(theta=slice(-myg, None)).values) + + v_inner_intersep = v.bout.fromRegion('inner_intersep') + + # Remove attributes that are expected to be different + del v_inner_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys11 + 1, jys21 + 1)), + v_inner_intersep.isel( + theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys11 + 1 - myg, jys11 + 1)).values, + v_inner_intersep.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys12 + 1, jys12 + 1 + myg)).values, + v_inner_intersep.isel(theta=slice(-myg, None)).values) + + v_inner_sol = v.bout.fromRegion('inner_SOL') + + # Remove attributes that are expected to be different + del v_inner_sol.attrs['region'] + xrt.assert_identical( + v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1, jys21 + 1)), + v_inner_sol.isel(theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only 
compare array values + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys11 + 1 - myg, jys11 + 1)).values, + v_inner_sol.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys21 + 1, jys21 + 1 + myg)).values, + v_inner_sol.isel(theta=slice(-myg, None)).values) + + v_upper_inner_PFR = v.bout.fromRegion('upper_inner_PFR') + + # Remove attributes that are expected to be different + del v_upper_inner_PFR.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys21 + 1, ny_inner)), + v_upper_inner_PFR.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys12 + 1 - myg, jys12 + 1)).values, + v_upper_inner_PFR.isel(theta=slice(myg)).values) + + v_upper_inner_intersep = v.bout.fromRegion('upper_inner_intersep') + + # Remove attributes that are expected to be different + del v_upper_inner_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys21 + 1, ny_inner)), + v_upper_inner_intersep.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys12 + 1 - myg, jys12 + 1)).values, + v_upper_inner_intersep.isel(theta=slice(myg)).values) + + v_upper_inner_SOL = v.bout.fromRegion('upper_inner_SOL') + + # Remove attributes that are expected to be different + del v_upper_inner_SOL.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys21 + 1, ny_inner)), + v_upper_inner_SOL.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + 
npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys21 + 1 - myg, jys21 + 1)).values, + v_upper_inner_SOL.isel(theta=slice(myg)).values) + + v_upper_outer_PFR = v.bout.fromRegion('upper_outer_PFR') + + # Remove attributes that are expected to be different + del v_upper_outer_PFR.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), + theta=slice(ny_inner, jys12 + 1)), + v_upper_outer_PFR.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys21 + 1, jys21 + 1 + myg)).values, + v_upper_outer_PFR.isel(theta=slice(-myg, None)).values) + + v_upper_outer_intersep = v.bout.fromRegion('upper_outer_intersep') + + # Remove attributes that are expected to be different + del v_upper_outer_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(ny_inner, jys12 + 1)), + v_upper_outer_intersep.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys21 + 1, jys21 + 1 + myg)).values, + v_upper_outer_intersep.isel(theta=slice(-myg, None)).values) + + v_upper_outer_SOL = v.bout.fromRegion('upper_outer_SOL') + + # Remove attributes that are expected to be different + del v_upper_outer_SOL.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(ny_inner, jys12 + 1)), + v_upper_outer_SOL.isel( + theta=slice(-myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys12 + 1, jys12 + 1 + myg)).values, + 
v_upper_outer_SOL.isel(theta=slice(-myg, None)).values) + + v_outer_core = v.bout.fromRegion('outer_core') + + # Remove attributes that are expected to be different + del v_outer_core.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys12 + 1, jys22 + 1)), + v_outer_core.isel( + theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys21 + 1 - myg, jys21 + 1)).values, + v_outer_core.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys11 + 1, jys11 + 1 + myg)).values, + v_outer_core.isel(theta=slice(-myg, None)).values) + + v_outer_intersep = v.bout.fromRegion('outer_intersep') + + # Remove attributes that are expected to be different + del v_outer_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys12 + 1, jys22 + 1)), + v_outer_intersep.isel( + theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys21 + 1 - myg, jys21 + 1)).values, + v_outer_intersep.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys22 + 1, jys22 + 1 + myg)).values, + v_outer_intersep.isel(theta=slice(-myg, None)).values) + + v_outer_sol = v.bout.fromRegion('outer_SOL') + + # Remove attributes that are expected to be different + del v_outer_sol.attrs['region'] + xrt.assert_identical( + v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys12 + 1, jys22 + 1)), + v_outer_sol.isel(theta=slice(myg, -myg if myg != 0 else None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only 
compare array values + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys12 + 1 - myg, jys12 + 1)).values, + v_outer_sol.isel(theta=slice(myg)).values) + npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys22 + 1, jys22 + 1 + myg)).values, + v_outer_sol.isel(theta=slice(-myg, None)).values) + + v_lower_outer_PFR = v.bout.fromRegion('lower_outer_PFR') + + # Remove attributes that are expected to be different + del v_lower_outer_PFR.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys22 + 1, None)), + v_lower_outer_PFR.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 + mxg), + theta=slice(jys11 + 1 - myg, jys11 + 1)).values, + v_lower_outer_PFR.isel(theta=slice(myg)).values) + + v_lower_outer_intersep = v.bout.fromRegion('lower_outer_intersep') + + # Remove attributes that are expected to be different + del v_lower_outer_intersep.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys22 + 1, None)), + v_lower_outer_intersep.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), + theta=slice(jys22 + 1 - myg, jys22 + 1)).values, + v_lower_outer_intersep.isel(theta=slice(myg)).values) + + v_lower_outer_SOL = v.bout.fromRegion('lower_outer_SOL') + + # Remove attributes that are expected to be different + del v_lower_outer_SOL.attrs['region'] + xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys22 + 1, None)), + v_lower_outer_SOL.isel(theta=slice(myg, None))) + if myg > 0: + # check y-guards, which were 'communicated' by fromRegion + # Coordinates are not equal, so only compare array values + npt.assert_equal(v.isel(x=slice(ixs2 
- mxg, None), + theta=slice(jys22 + 1 - myg, jys22 + 1)).values, + v_lower_outer_SOL.isel(theta=slice(myg)).values) + class TestLoadInputFile: @pytest.mark.skip diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index fcdd6618..bae98c61 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -9,14 +9,14 @@ from xbout import open_boutdataset -class TestRegion: +params_guards = "guards" +params_guards_values = [{'x': 0, 'y': 0}, {'x': 2, 'y': 0}, {'x': 0, 'y': 2}, + {'x': 2, 'y': 2}] +params_boundaries = "keep_xboundaries, keep_yboundaries" +params_boundaries_values = [(False, False), (True, False), (False, True), + (True, True)] - params_guards = "guards" - params_guards_values = [{'x': 0, 'y': 0}, {'x': 2, 'y': 0}, {'x': 0, 'y': 2}, - {'x': 2, 'y': 2}] - params_boundaries = "keep_xboundaries, keep_yboundaries" - params_boundaries_values = [(False, False), (True, False), (False, True), - (True, True)] +class TestRegion: @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) @@ -1283,3 +1283,5 @@ def test_region_disconnecteddoublenull_get_one_guard( npt.assert_equal(n.isel(x=slice(ixs2 - xguards, None), theta=slice(jys22 + 1 - yguards, jys22 + 1)).values, n_lower_outer_SOL.isel(theta=slice(yguards)).values) + + From fda57eaf04a0af4c5c83ef05e6ca1160ee3e6d70 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 00:55:47 +0000 Subject: [PATCH 38/94] Remove region attribute from result in BoutDataArray.highParallelRes() --- xbout/boutdataarray.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index f26f0319..52636882 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -519,6 +519,12 @@ def highParallelRes(self, **kwargs): result = xr.combine_by_coords(parts) result.attrs = parts[0].attrs + # result has all regions, so should not have a region attribute + if 'region' in 
result.attrs: + del result.attrs['region'] + if 'region' in result[self.data.name].attrs: + del result[self.data.name].attrs['region'] + return result From c69f81e96062977e158766b5d989dbed5d7d15ca Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 01:11:56 +0000 Subject: [PATCH 39/94] Fix merge of Datasets in BoutDataset.getHighParallelResVars() Previously was not assigning the merged result back to the result. --- xbout/boutdataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 9a1c10fa..f69cfd66 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -129,7 +129,7 @@ def getHighParallelResVars(self, variables, **kwargs): # passed-in argument. ds = self.data[variables[0]].bout.highParallelRes(**kwargs) for var in variables[1:]: - ds.merge(self.data[var].bout.highParallelRes(**kwargs)) + ds = ds.merge(self.data[var].bout.highParallelRes(**kwargs)) # Add extra variables needed to make this a valid Dataset ds['dx'] = self.data['dx'].bout.highParallelRes(**kwargs)['dx'] From 8487f3f081a789c85f82a2c9154587e484ce7ecb Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 14:03:48 +0000 Subject: [PATCH 40/94] Reduce grid sizes to speed up parallel interpolation tests --- xbout/tests/test_boutdataarray.py | 10 +++++----- xbout/tests/test_boutdataset.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index a29d5070..72bb0132 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -199,7 +199,7 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files): npt.assert_allclose(n_nal[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) 
# noqa: E501 def test_highParallelResRegion_core(self, tmpdir_factory, bout_xyt_example_files): - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, topology='core') @@ -235,7 +235,7 @@ def f(t): @pytest.mark.parametrize('res_factor', [2, 3, 7, 18]) def test_highParallelResRegion_core_change_n(self, tmpdir_factory, bout_xyt_example_files, res_factor): - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, topology='core') @@ -270,7 +270,7 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, topology='sol') @@ -305,7 +305,7 @@ def f(t): def test_highParallelResRegion_singlenull(self, tmpdir_factory, bout_xyt_example_files): - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y':2}, topology='single-null') @@ -356,7 +356,7 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) def test_highParallelRes(self, tmpdir_factory, bout_xyt_example_files): - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 16, 7), nxpe=1, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y':2}, topology='single-null') diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index bbcf1c8a..82802980 100644 --- 
a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -112,7 +112,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, # Note using more than MXG x-direction points and MYG y-direction points per # output file ensures tests for whether boundary cells are present do not fail # when using minimal numbers of processors - path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 4, 7), nxpe=3, + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 4, 3), nxpe=3, nype=6, nt=1, guards=guards, grid='grid', topology='disconnected-double-null') From 73fe45763303d9740280d318f94841c4c88f5386 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 14:14:19 +0000 Subject: [PATCH 41/94] Test toroidal_points argument for highParallelRes, highParallelResRegion --- xbout/tests/test_boutdataarray.py | 35 +++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 72bb0132..ba325547 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -5,6 +5,7 @@ from pathlib import Path import xarray as xr +import xarray.testing as xrt from xarray.core.utils import dict_equiv from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds @@ -394,3 +395,37 @@ def f_y(t): npt.assert_allclose(n_highres_ds['n'].values, expected.values, rtol=0., atol=1.1e-2) + + def test_highParallelRes_toroidal_points(self, tmpdir_factory, bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, + nype=3, nt=1, grid='grid', guards={'y':2}, + topology='single-null') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n_highres_ds = ds['n'].bout.highParallelRes() + + n_highres_ds_truncated = ds['n'].bout.highParallelRes(toroidal_points=2) + + 
xrt.assert_identical(n_highres_ds_truncated, n_highres_ds.isel(zeta=[0, 2])) + + def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, + bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, + nype=3, nt=1, grid='grid', guards={'y':2}, + topology='single-null') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n_highres_ds = ds['n'].bout.highParallelRes() + + points_list = [1, 2] + + n_highres_ds_truncated = ds['n'].bout.highParallelRes( + toroidal_points=points_list) + + xrt.assert_identical(n_highres_ds_truncated, n_highres_ds.isel(zeta=points_list)) From f7725302dfe9cdd52eefa0d03ae1bad5822edf5b Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 14:26:50 +0000 Subject: [PATCH 42/94] Fix for when toroidal_points does not divide nz exactly Want to return at most toroidal_points points from highParallelResRegion, but using a stride of nz//toroidal points can sometimes return more points. Fix by using a stride of (nz + toroidal_points - 1)//toroidal_points. --- xbout/boutdataarray.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 52636882..421f0648 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -394,8 +394,10 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, BoutDataset.setupParallelInterp(), or 10 if that has not been called. toroidal_points : int or sequence of int, optional If int, number of toroidal points to output, applies a stride to toroidal - direction to save memory usage. If sequence of int, the indexes of toroidal - points for the output. + direction to save memory usage. It is not always possible to get a particular + number of output points with a constant stride, so the number of outputs will + be only less than or equal to toroidal_points. 
If sequence of int, the indexes + of toroidal points for the output. method : str, optional The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is @@ -472,7 +474,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, if toroidal_points is not None and zcoord in da.sizes: if isinstance(toroidal_points, int): nz = len(da[zcoord]) - zstride = nz//toroidal_points + zstride = (nz + toroidal_points - 1)//toroidal_points da = da.isel(**{zcoord: slice(None, None, zstride)}) else: da = da.isel(**{zcoord: toroidal_points}) From ccf418e8c38ddfc2fc870a6c4aae082e3a516fb5 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 16:02:19 +0000 Subject: [PATCH 43/94] Add pytest.mark.long to disable long tests by default Mark a test as 'long' by using the mark 'pytest.mark.long'. 'long' tests are skipped by default, and can be enabled by passing the option '--long' on the command line. Long tests are enabled on Travis. Most of the combinations of inputs for the Region and parallel interpolation tests are marked as long, to speed up pytest runs. --- .travis.yml | 2 +- conftest.py | 15 +++++++++++++++ pytest.ini | 3 +++ xbout/tests/test_boutdataarray.py | 7 ++++++- xbout/tests/test_region.py | 13 +++++++++++-- 5 files changed, 36 insertions(+), 4 deletions(-) create mode 100644 conftest.py diff --git a/.travis.yml b/.travis.yml index 524410f6..838ad04d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,6 @@ install: - pip install -r requirements.txt - pip install -e . 
script: - - pytest -v --cov + - pytest -v --long --cov after_success: - codecov diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..63267763 --- /dev/null +++ b/conftest.py @@ -0,0 +1,15 @@ +import pytest + +# Add command line option '--long' for pytest, to be used to enable long tests +def pytest_addoption(parser): + parser.addoption("--long", action="store_true", default=False, + help="enable tests marked as 'long'") + +def pytest_collection_modifyitems(config, items): + if not config.getoption("--long"): + # --long not given in cli: skip long tests + print("\n skipping long tests, pass '--long' to enable") + skip_long = pytest.mark.skip(reason="need --long option to run") + for item in items: + if "long" in item.keywords: + item.add_marker(skip_long) diff --git a/pytest.ini b/pytest.ini index ac2cecc2..0b2d89fe 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,3 +2,6 @@ filterwarnings = ignore:No geometry type found, no coordinates will be added:UserWarning ignore:deallocating CachingFileManager.*, but file is not already closed. This may indicate a bug\.:RuntimeWarning + +markers = + long: long test, or one of many permutations (disabled by default) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index ba325547..a11a79a6 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -199,6 +199,7 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files): npt.assert_allclose(n_nal[t, 1, 3, 5].values, 1000.*t + 100.*1 + 10.*3. + 5., rtol=1.e-15, atol=0.) # noqa: E501 npt.assert_allclose(n_nal[t, 1, 3, 6].values, 1000.*t + 100.*1 + 10.*3. + 6., rtol=1.e-15, atol=0.) 
# noqa: E501 + @pytest.mark.long def test_highParallelResRegion_core(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, @@ -233,7 +234,10 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) - @pytest.mark.parametrize('res_factor', [2, 3, 7, 18]) + @pytest.mark.parametrize('res_factor', [pytest.param(2, marks=pytest.mark.long), + 3, + pytest.param(7, marks=pytest.mark.long), + pytest.param(18, marks=pytest.mark.long)]) def test_highParallelResRegion_core_change_n(self, tmpdir_factory, bout_xyt_example_files, res_factor): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, @@ -270,6 +274,7 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) + @pytest.mark.long def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y':2}, diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index bae98c61..a386b67b 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -10,14 +10,19 @@ params_guards = "guards" -params_guards_values = [{'x': 0, 'y': 0}, {'x': 2, 'y': 0}, {'x': 0, 'y': 2}, +params_guards_values = [pytest.param({'x': 0, 'y': 0}, marks=pytest.mark.long), + pytest.param({'x': 2, 'y': 0}, marks=pytest.mark.long), + pytest.param({'x': 0, 'y': 2}, marks=pytest.mark.long), {'x': 2, 'y': 2}] params_boundaries = "keep_xboundaries, keep_yboundaries" -params_boundaries_values = [(False, False), (True, False), (False, True), +params_boundaries_values = [pytest.param(False, False, marks=pytest.mark.long), + pytest.param(True, False, marks=pytest.mark.long), + pytest.param(False, True, marks=pytest.mark.long), (True, True)] class TestRegion: + @pytest.mark.long 
@pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_region_core(self, tmpdir_factory, bout_xyt_example_files, guards, @@ -53,6 +58,7 @@ def test_region_core(self, tmpdir_factory, bout_xyt_example_files, guards, n.isel(theta=slice(ybndry, -ybndry if ybndry != 0 else None)), n_core.isel(theta=slice(ybndry, -ybndry if ybndry != 0 else None))) + @pytest.mark.long @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_region_sol(self, tmpdir_factory, bout_xyt_example_files, guards, @@ -133,6 +139,7 @@ def test_region_limiter(self, tmpdir_factory, bout_xyt_example_files, guards, theta=slice(ybndry, -ybndry if ybndry != 0 else None)), n_core.isel(theta=slice(ybndry, -ybndry if ybndry != 0 else None))) + @pytest.mark.long @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_region_xpoint(self, tmpdir_factory, bout_xyt_example_files, guards, @@ -282,6 +289,7 @@ def test_region_xpoint(self, tmpdir_factory, bout_xyt_example_files, guards, theta=slice(jys2 + 1 - myg, jys2 + 1)).values, n_lower_outer_SOL.isel(theta=slice(myg)).values) + @pytest.mark.long @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_region_singlenull(self, tmpdir_factory, bout_xyt_example_files, guards, @@ -403,6 +411,7 @@ def test_region_singlenull(self, tmpdir_factory, bout_xyt_example_files, guards, theta=slice(jys2 + 1 - myg, jys2 + 1)).values, n_outer_SOL.isel(theta=slice(myg)).values) + @pytest.mark.long @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_region_connecteddoublenull(self, tmpdir_factory, bout_xyt_example_files, From 
6ff08f8e669d8e0c9cd2ae115563f7882cb38d37 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 17:33:10 +0000 Subject: [PATCH 44/94] Return DataArray by default from BoutDataArray.highParallelRes() With option to return as a Dataset instead. --- xbout/boutdataarray.py | 14 +++++++++++--- xbout/boutdataset.py | 11 +++++++---- xbout/tests/test_boutdataarray.py | 19 +++++++++---------- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 421f0648..d24c14a1 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -482,7 +482,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, return da - def highParallelRes(self, **kwargs): + def highParallelRes(self, return_dataset=False, **kwargs): """ Interpolate in the parallel direction to get a higher resolution version of the variable. @@ -500,10 +500,14 @@ def highParallelRes(self, **kwargs): The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is 'cubic'. + return_dataset : bool, optional + If this is set to True, return a Dataset containing this variable as a member + (by default returns a DataArray) Returns ------- - A new Dataset containing a high-resolution version of the variable. + A new DataArray containing a high-resolution version of the variable. (If + return_dataset=True, instead returns a Dataset containing the DataArray.) """ # xr.combine_by_coords does not keep attrs at the moment. 
See @@ -527,7 +531,11 @@ def highParallelRes(self, **kwargs): if 'region' in result[self.data.name].attrs: del result[self.data.name].attrs['region'] - return result + if return_dataset: + return result + else: + # Extract the DataArray to return + return result[self.data.name] def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index f69cfd66..22757102 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -123,16 +123,19 @@ def getHighParallelResVars(self, variables, **kwargs): Dataset is a valid BoutDataset, although containing only the specified variables. """ if isinstance(variables, str): - ds = self.data[variables].bout.highParallelRes(**kwargs) + ds = self.data[variables].bout.highParallelRes(return_dataset=True, **kwargs) else: # Need to start with a Dataset with attrs as merge() drops the attrs of the # passed-in argument. - ds = self.data[variables[0]].bout.highParallelRes(**kwargs) + ds = self.data[variables[0]].bout.highParallelRes(return_dataset=True, + **kwargs) for var in variables[1:]: - ds = ds.merge(self.data[var].bout.highParallelRes(**kwargs)) + ds = ds.merge(self.data[var].bout.highParallelRes(return_dataset=True, + **kwargs)) # Add extra variables needed to make this a valid Dataset - ds['dx'] = self.data['dx'].bout.highParallelRes(**kwargs)['dx'] + ds['dx'] = self.data['dx'].bout.highParallelRes(return_dataset=True, + **kwargs)['dx'] # dy needs to be compatible with the new poloidal coordinate # dy was created as a coordinate in BoutDataArray.highParallelResRegion, here just diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index a11a79a6..8b0df9c3 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -394,11 +394,11 @@ def f_y(t): f_fine = f_y(theta_fine)*(x + 1.) 
- n_highres_ds = n.bout.highParallelRes().isel(theta=slice(2, -2)) + n_highres = n.bout.highParallelRes().isel(theta=slice(2, -2)) - expected = f_fine.broadcast_like(n_highres_ds['n']) + expected = f_fine.broadcast_like(n_highres) - npt.assert_allclose(n_highres_ds['n'].values, expected.values, + npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.1e-2) def test_highParallelRes_toroidal_points(self, tmpdir_factory, bout_xyt_example_files): @@ -410,11 +410,11 @@ def test_highParallelRes_toroidal_points(self, tmpdir_factory, bout_xyt_example_ gridfilepath=Path(path).parent.joinpath('grid.nc'), geometry='toroidal', keep_yboundaries=True) - n_highres_ds = ds['n'].bout.highParallelRes() + n_highres = ds['n'].bout.highParallelRes() - n_highres_ds_truncated = ds['n'].bout.highParallelRes(toroidal_points=2) + n_highres_truncated = ds['n'].bout.highParallelRes(toroidal_points=2) - xrt.assert_identical(n_highres_ds_truncated, n_highres_ds.isel(zeta=[0, 2])) + xrt.assert_identical(n_highres_truncated, n_highres.isel(zeta=[0, 2])) def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, bout_xyt_example_files): @@ -426,11 +426,10 @@ def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, gridfilepath=Path(path).parent.joinpath('grid.nc'), geometry='toroidal', keep_yboundaries=True) - n_highres_ds = ds['n'].bout.highParallelRes() + n_highres = ds['n'].bout.highParallelRes() points_list = [1, 2] - n_highres_ds_truncated = ds['n'].bout.highParallelRes( - toroidal_points=points_list) + n_highres_truncated = ds['n'].bout.highParallelRes(toroidal_points=points_list) - xrt.assert_identical(n_highres_ds_truncated, n_highres_ds.isel(zeta=points_list)) + xrt.assert_identical(n_highres_truncated, n_highres.isel(zeta=points_list)) From 236402c480ac1535425dd9b209157761a2aaa814 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 22:58:12 +0000 Subject: [PATCH 45/94] Mark most permutations of to/fromFieldAligned tests as 'long' --- 
xbout/tests/test_boutdataarray.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 20c71504..0febb3b2 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -25,7 +25,10 @@ def test_to_dataset(self, tmpdir_factory, bout_xyt_example_files): assert dict_equiv(ds.attrs, new_ds.attrs) assert dict_equiv(ds.metadata, new_ds.metadata) - @pytest.mark.parametrize('nz', [6, 7, 8, 9]) + @pytest.mark.parametrize('nz', [pytest.param(6, marks=pytest.mark.long), + 7, + pytest.param(8, marks=pytest.mark.long), + pytest.param(9, marks=pytest.mark.long)]) def test_toFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 4, nz), nxpe=1, nype=1, nt=1) @@ -74,7 +77,10 @@ def test_toFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): for z in range(nz): npt.assert_allclose(n_al[t, 1, 3, z].values, 1000.*t + 100.*1 + 10.*3. + (z + 7)%nz, rtol=1.e-15, atol=0.) 
# noqa: E501 - @pytest.mark.parametrize('nz', [6, 7, 8, 9]) + @pytest.mark.parametrize('nz', [pytest.param(6, marks=pytest.mark.long), + 7, + pytest.param(8, marks=pytest.mark.long), + pytest.param(9, marks=pytest.mark.long)]) def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): path = bout_xyt_example_files(tmpdir_factory, lengths=(3, 3, 4, nz), nxpe=1, nype=1, nt=1) From dc2f982e5259855aa010f6a4a7c11ff5a672a42a Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 21 Mar 2020 23:19:27 +0000 Subject: [PATCH 46/94] PEP8 fixes --- conftest.py | 2 + xbout/boutdataarray.py | 33 ++++++++-------- xbout/boutdataset.py | 8 ++-- xbout/geometries.py | 29 +++++++------- xbout/region.py | 65 ++++++++++++++++--------------- xbout/tests/test_boutdataarray.py | 19 ++++----- xbout/tests/test_boutdataset.py | 13 ++++--- xbout/tests/test_geometries.py | 2 +- xbout/tests/test_region.py | 3 +- xbout/utils.py | 1 + 10 files changed, 92 insertions(+), 83 deletions(-) diff --git a/conftest.py b/conftest.py index 63267763..ffc59902 100644 --- a/conftest.py +++ b/conftest.py @@ -1,10 +1,12 @@ import pytest + # Add command line option '--long' for pytest, to be used to enable long tests def pytest_addoption(parser): parser.addoption("--long", action="store_true", default=False, help="enable tests marked as 'long'") + def pytest_collection_modifyitems(config, items): if not config.getoption("--long"): # --long not given in cli: skip long tests diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 6f7773aa..b2578235 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -387,8 +387,8 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, If int, number of toroidal points to output, applies a stride to toroidal direction to save memory usage. It is not always possible to get a particular number of output points with a constant stride, so the number of outputs will - be only less than or equal to toroidal_points. 
If sequence of int, the indexes - of toroidal points for the output. + be only less than or equal to toroidal_points. If sequence of int, the + indexes of toroidal points for the output. method : str, optional The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is @@ -439,12 +439,12 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, da = _update_metadata_increased_resolution(da, n) - # Add dy to da as a coordinate. This will only be temporary, once we have combined - # the regions together, we will demote dy to a regular variable + # Add dy to da as a coordinate. This will only be temporary, once we have + # combined the regions together, we will demote dy to a regular variable dy_array = xr.DataArray(np.full([da.sizes[xcoord], da.sizes[ycoord]], dy), dims=[xcoord, ycoord]) - # need a view of da with only x- and y-dimensions, unfortunately no neat way to do - # this with isel + # need a view of da with only x- and y-dimensions, unfortunately no neat way to + # do this with isel da_2d = da if tcoord in da.sizes: da_2d = da_2d.isel(**{tcoord: 0}, drop=True) @@ -472,7 +472,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, return da - def highParallelRes(self, return_dataset=False, **kwargs): """ Interpolate in the parallel direction to get a higher resolution version of the @@ -501,20 +500,20 @@ def highParallelRes(self, return_dataset=False, **kwargs): return_dataset=True, instead returns a Dataset containing the DataArray.) """ - # xr.combine_by_coords does not keep attrs at the moment. See - # https://github.com/pydata/xarray/issues/3865 - # For now just copy the attrs from the first region. Can remove this workaround - # when the xarray issue is fixed. 
Should be able to use just: - #return xr.combine_by_coords( - # [self.highParallelResRegion(region, **kwargs).bout.to_dataset() - # for region in self.data.regions] - # ) - parts = [self.highParallelResRegion(region, **kwargs).bout.to_dataset() - for region in self.data.regions] + for region in self.data.regions] result = xr.combine_by_coords(parts) result.attrs = parts[0].attrs + # xr.combine_by_coords does not keep attrs at the moment. See + # https://github.com/pydata/xarray/issues/3865 + # For now just copy the attrs from the first region. Can remove this workaround + # when the xarray issue is fixed. Should be able to use instead of the above + # just: + # result = xr.combine_by_coords( + # [self.highParallelResRegion(region, **kwargs).bout.to_dataset() + # for region in self.data.regions] + # ) # result has all regions, so should not have a region attribute if 'region' in result.attrs: diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 22757102..0f20ffcd 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -138,16 +138,16 @@ def getHighParallelResVars(self, variables, **kwargs): **kwargs)['dx'] # dy needs to be compatible with the new poloidal coordinate - # dy was created as a coordinate in BoutDataArray.highParallelResRegion, here just - # need to demote back to a regular variable. + # dy was created as a coordinate in BoutDataArray.highParallelResRegion, here + # just need to demote back to a regular variable. 
ds = ds.reset_coords('dy') # Apply geometry try: ds = apply_geometry(ds, ds.geometry) except AttributeError as e: - # if no geometry was originally applied, then ds has no geometry attribute and - # we can continue without applying geometry here + # if no geometry was originally applied, then ds has no geometry attribute + # and we can continue without applying geometry here if str(e) != "'Dataset' object has no attribute 'geometry'": raise diff --git a/xbout/geometries.py b/xbout/geometries.py index cb8e3777..e93cd9b7 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -72,8 +72,8 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): # ###################### # Note the global coordinates used here are defined so that they are zero at # the boundaries of the grid (where the grid includes all boundary cells), not - # necessarily the physical boundaries, because constant offsets do not matter, as long - # as these bounds are consistent with the global coordinates defined in + # necessarily the physical boundaries, because constant offsets do not matter, as + # long as these bounds are consistent with the global coordinates defined in # Region.__init__() (we will only use these coordinates for interpolation) and it is # simplest to calculate them with cumsum(). xcoord = updated_ds.metadata.get('bout_xdim', 'x') @@ -83,8 +83,8 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): # Make index 'x' a coordinate, useful for handling global indexing # Note we have to use the index value, not the value calculated from 'dx' because # 'dx' may not be consistent between different regions (e.g. core and PFR). - # For some geometries xcoord may have already been created by add_geometry_coords, - # in which case we do not need this. + # For some geometries xcoord may have already been created by + # add_geometry_coords, in which case we do not need this. 
nx = updated_ds.dims[xcoord] updated_ds = updated_ds.assign_coords(**{xcoord: np.arange(nx)}) ny = updated_ds.dims[ycoord] @@ -115,7 +115,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): + str(2.*np.pi*updated_ds.metadata['ZMAX'] - z0) + '): using value from dz') z = xr.DataArray(np.linspace(start=z0, stop=z1, num=nz, endpoint=False), - dims=zcoord) + dims=zcoord) updated_ds = updated_ds.assign_coords(**{zcoord: z}) return updated_ds @@ -171,8 +171,8 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): coordinates = _set_default_toroidal_coordinates(coordinates) - # If the coordinates already exist, we are re-applying the geometry and do not need to - # add them again. + # If the coordinates already exist, we are re-applying the geometry and do not need + # to add them again. # Ignore coordinates['z'] because ds might be Field2D-type without a z-dimension, and # if the other coordinates all match for a Field3D-type ds, we must actually be # re-applying the geometry. @@ -181,21 +181,22 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): for c in coordinates.values()]): # Check whether coordinates names conflict with variables in ds - bad_names = [name for name in coordinates.values() if name in ds and name not in - ds.coords] + bad_names = [name for name in coordinates.values() + if name in ds and name not in ds.coords] if bad_names: raise ValueError("Coordinate names {} clash with variables in the dataset. " - "Register a different geometry to provide alternative names. " - "It may be useful to use the 'coordinates' argument to " - "add_toroidal_geometry_coords() for this.".format(bad_names)) + "Register a different geometry to provide alternative " + "names. It may be useful to use the 'coordinates' argument " + "to add_toroidal_geometry_coords() for this." 
+ .format(bad_names)) # Get extra geometry information from grid file if it's not in the dump files needed_variables = ['psixy', 'Rxy', 'Zxy'] for v in needed_variables: if v not in ds: if grid is None: - raise ValueError("Grid file is required to provide %s. Pass the grid " - "file name as the 'gridfilepath' argument to " + raise ValueError("Grid file is required to provide %s. Pass the " + "grid file name as the 'gridfilepath' argument to " "open_boutdataset().") ds[v] = grid[v] diff --git a/xbout/region.py b/xbout/region.py index 3df358d5..93aef404 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -92,11 +92,11 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, ycoord = ds.metadata['bout_ydim'] # Note the global coordinates used here are defined so that they are zero at - # the boundaries of the grid (where the grid includes all boundary cells), not - # necessarily the physical boundaries because constant offsets do not matter, - # as long as these bounds are consistent with the global coordinates defined - # in apply_geometry (we will only use these coordinates for interpolation) and - # it is simplest to calculate them with cumsum(). + # the boundaries of the grid (where the grid includes all boundary cells), + # not necessarily the physical boundaries because constant offsets do not + # matter, as long as these bounds are consistent with the global coordinates + # defined in apply_geometry (we will only use these coordinates for + # interpolation) and it is simplest to calculate them with cumsum(). 
# dx is constant in any particular region in the y-direction, so convert to a # 1d array @@ -114,7 +114,7 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, dy = ds['dy'].isel(**{xcoord: self.xinner_ind}) dy_cumsum = dy.cumsum() self.ylower = dy_cumsum[ylower_ind] - dy[ylower_ind] - self.yupper = dy_cumsum[yupper_ind- 1] + self.yupper = dy_cumsum[yupper_ind - 1] def __repr__(self): result = "\n" @@ -405,8 +405,9 @@ def _create_regions_toroidal(ds): connect_outer='upper_inner_intersep', connect_lower='upper_outer_PFR') regions['upper_inner_intersep'] = Region( name='upper_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', - connect_outer='upper_inner_SOL', connect_lower='upper_outer_intersep') + ylower_ind=jys21 + 1, yupper_ind=nyinner, + connect_inner='upper_inner_PFR', connect_outer='upper_inner_SOL', + connect_lower='upper_outer_intersep') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys21 + 1, yupper_ind=nyinner, @@ -417,8 +418,9 @@ def _create_regions_toroidal(ds): connect_outer='upper_outer_intersep', connect_upper='upper_inner_PFR') regions['upper_outer_intersep'] = Region( name='upper_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_inner='upper_outer_PFR', - connect_outer='upper_outer_SOL', connect_upper='upper_inner_intersep') + ylower_ind=nyinner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_PFR', connect_outer='upper_outer_SOL', + connect_upper='upper_inner_intersep') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=nyinner, yupper_ind=jys12 + 1, @@ -440,16 +442,16 @@ def _create_regions_toroidal(ds): connect_upper='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, 
yupper_ind=ny, connect_outer='lower_outer_intersep', - connect_lower='lower_inner_PFR') + ylower_ind=jys22 + 1, yupper_ind=ny, + connect_outer='lower_outer_intersep', connect_lower='lower_inner_PFR') regions['lower_outer_intersep'] = Region( name='lower_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', connect_outer='lower_outer_SOL', connect_lower='outer_intersep') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_intersep', - connect_lower='outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, + connect_inner='lower_outer_intersep', connect_lower='outer_SOL') _check_connections(regions) elif topology == 'connected-double-null': regions['lower_inner_PFR'] = Region( @@ -470,20 +472,20 @@ def _create_regions_toroidal(ds): connect_lower='lower_inner_SOL', connect_upper='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_outer='upper_inner_SOL', - connect_lower='upper_outer_PFR') + ylower_ind=jys21 + 1, yupper_ind=nyinner, + connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys21 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', - connect_lower='inner_SOL') + ylower_ind=jys21 + 1, yupper_ind=nyinner, + connect_inner='upper_inner_PFR', connect_lower='inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_outer='upper_outer_SOL', - connect_upper='upper_inner_PFR') + ylower_ind=nyinner, yupper_ind=jys12 + 1, + connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') regions['upper_outer_SOL'] = Region( 
name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys12 + 1, connect_inner='upper_outer_PFR', - connect_upper='outer_SOL') + ylower_ind=nyinner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_PFR', connect_upper='outer_SOL') regions['outer_core'] = Region( name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_outer='outer_SOL', @@ -504,7 +506,8 @@ def _create_regions_toroidal(ds): elif topology == 'single-null': regions['inner_PFR'] = Region( name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, - yupper_ind=jys11 + 1, connect_outer='inner_SOL', connect_upper='outer_PFR') + yupper_ind=jys11 + 1, connect_outer='inner_SOL', + connect_upper='outer_PFR') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='inner_PFR', connect_upper='SOL') @@ -555,20 +558,20 @@ def _create_regions_toroidal(ds): connect_upper='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=nyinner, connect_outer='upper_inner_SOL', - connect_lower='upper_outer_PFR') + ylower_ind=jys11 + 1, yupper_ind=nyinner, + connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=nyinner, connect_inner='upper_inner_PFR', - connect_lower='lower_inner_SOL') + ylower_ind=jys11 + 1, yupper_ind=nyinner, + connect_inner='upper_inner_PFR', connect_lower='lower_inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys22 + 1, connect_outer='upper_outer_SOL', - connect_upper='upper_inner_PFR') + ylower_ind=nyinner, yupper_ind=jys22 + 1, + connect_outer='upper_outer_SOL', 
connect_upper='upper_inner_PFR') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys22 + 1, connect_inner='upper_outer_PFR', - connect_upper='lower_outer_SOL') + ylower_ind=nyinner, yupper_ind=jys22 + 1, + connect_inner='upper_outer_PFR', connect_upper='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 8e0de1db..0890dd40 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -132,7 +132,7 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): @pytest.mark.long def test_highParallelResRegion_core(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=1, nt=1, grid='grid', guards={'y':2}, + nype=1, nt=1, grid='grid', guards={'y': 2}, topology='core') ds = open_boutdataset(datapath=path, @@ -171,7 +171,7 @@ def f(t): def test_highParallelResRegion_core_change_n(self, tmpdir_factory, bout_xyt_example_files, res_factor): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=1, nt=1, grid='grid', guards={'y':2}, + nype=1, nt=1, grid='grid', guards={'y': 2}, topology='core') ds = open_boutdataset(datapath=path, @@ -207,7 +207,7 @@ def f(t): @pytest.mark.long def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=1, nt=1, grid='grid', guards={'y':2}, + nype=1, nt=1, grid='grid', guards={'y': 2}, topology='sol') ds = open_boutdataset(datapath=path, @@ -242,7 +242,7 @@ def f(t): def test_highParallelResRegion_singlenull(self, tmpdir_factory, bout_xyt_example_files): path = 
bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=3, nt=1, grid='grid', guards={'y':2}, + nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') ds = open_boutdataset(datapath=path, @@ -285,7 +285,7 @@ def f(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) for region in ['outer_PFR', 'outer_SOL']: - n_highres = n.bout.highParallelResRegion(region).isel(theta=slice( -2)) + n_highres = n.bout.highParallelResRegion(region).isel(theta=slice(-2)) expected = f_fine.broadcast_like(n_highres) @@ -293,7 +293,7 @@ def f(t): def test_highParallelRes(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=3, nt=1, grid='grid', guards={'y':2}, + nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') ds = open_boutdataset(datapath=path, @@ -331,9 +331,10 @@ def f_y(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.1e-2) - def test_highParallelRes_toroidal_points(self, tmpdir_factory, bout_xyt_example_files): + def test_highParallelRes_toroidal_points(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=3, nt=1, grid='grid', guards={'y':2}, + nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') ds = open_boutdataset(datapath=path, @@ -349,7 +350,7 @@ def test_highParallelRes_toroidal_points(self, tmpdir_factory, bout_xyt_example_ def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, - nype=3, nt=1, grid='grid', guards={'y':2}, + nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') ds = open_boutdataset(datapath=path, diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 82802980..bf2a7e3e 100644 --- a/xbout/tests/test_boutdataset.py +++ 
b/xbout/tests/test_boutdataset.py @@ -8,7 +8,7 @@ from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout.tests.test_region import (params_guards, params_guards_values, - params_boundaries, params_boundaries_values) + params_boundaries, params_boundaries_values) from xbout import BoutDatasetAccessor, open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _set_attrs_on_all_vars @@ -103,7 +103,7 @@ def test_resetParallelInterpFactor(self): @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, - guards, keep_xboundaries, keep_yboundaries): + guards, keep_xboundaries, keep_yboundaries): # This test checks that the regions created in the new high-resolution Dataset by # getHighParallelResVars are correct. # This test does not test the accuracy of the parallel interpolation (there are @@ -178,13 +178,15 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys11 + 1, jys11 + 1 + myg)).values, - v_lower_inner_intersep.isel(theta=slice(-myg, None)).values) + v_lower_inner_intersep.isel( + theta=slice(-myg, None)).values) v_lower_inner_SOL = v.bout.fromRegion('lower_inner_SOL') # Remove attributes that are expected to be different del v_lower_inner_SOL.attrs['region'] - xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1)), + xrt.assert_identical(v.isel(x=slice(ixs2 - mxg, None), + theta=slice(jys11 + 1)), v_lower_inner_SOL.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: @@ -317,7 +319,8 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + 
mxg), theta=slice(jys21 + 1, jys21 + 1 + myg)).values, - v_upper_outer_intersep.isel(theta=slice(-myg, None)).values) + v_upper_outer_intersep.isel( + theta=slice(-myg, None)).values) v_upper_outer_SOL = v.bout.fromRegion('upper_outer_SOL') diff --git a/xbout/tests/test_geometries.py b/xbout/tests/test_geometries.py index 9ab6038e..1ccb9f1e 100644 --- a/xbout/tests/test_geometries.py +++ b/xbout/tests/test_geometries.py @@ -23,7 +23,7 @@ def add_schwarzschild_coords(ds, coordinates=None): assert "Schwarzschild" in REGISTERED_GEOMETRIES.keys() original = Dataset() - original['dy'] = DataArray(np.ones((3,4)), dims=('x', 'y')) + original['dy'] = DataArray(np.ones((3, 4)), dims=('x', 'y')) original.attrs['metadata'] = {} updated = apply_geometry(ds=original, geometry_name="Schwarzschild") assert_equal(updated['event_horizon'], DataArray(4.0)) diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index a386b67b..ea750d2d 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -20,6 +20,7 @@ pytest.param(False, True, marks=pytest.mark.long), (True, True)] + class TestRegion: @pytest.mark.long @@ -1292,5 +1293,3 @@ def test_region_disconnecteddoublenull_get_one_guard( npt.assert_equal(n.isel(x=slice(ixs2 - xguards, None), theta=slice(jys22 + 1 - yguards, jys22 + 1)).values, n_lower_outer_SOL.isel(theta=slice(yguards)).values) - - diff --git a/xbout/utils.py b/xbout/utils.py index 0530ce09..709dab24 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -43,6 +43,7 @@ def _separate_metadata(ds): return ds.drop(scalar_vars), metadata + def _update_metadata_increased_resolution(da, n): """ Update the metadata variables to account for a y-direction resolution increased by a From 8db119d38c162f92785cddbdfdd4edda38890129 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 16:16:19 +0000 Subject: [PATCH 47/94] Move Region creation out into a dict of functions Simplifies the _create_regions_toroidal() function. 
--- xbout/region.py | 500 ++++++++++++++++++++++++++---------------------- 1 file changed, 275 insertions(+), 225 deletions(-) diff --git a/xbout/region.py b/xbout/region.py index 93aef404..d5dc77c6 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -1,7 +1,5 @@ from collections import OrderedDict -import numpy as np - from .utils import _set_attrs_on_all_vars @@ -249,7 +247,7 @@ def _order_vars(lower, upper): def _get_topology(ds): jys11 = ds.metadata['jyseps1_1'] jys21 = ds.metadata['jyseps2_1'] - nyinner = ds.metadata['ny_inner'] + ny_inner = ds.metadata['ny_inner'] jys12 = ds.metadata['jyseps1_2'] jys22 = ds.metadata['jyseps2_2'] ny = ds.metadata['ny'] @@ -270,13 +268,13 @@ def _get_topology(ds): return 'single-null' if jys11 == jys21 and jys12 == jys22: - if jys11 < nyinner - 1 and jys22 > nyinner: + if jys11 < ny_inner - 1 and jys22 > ny_inner: return 'xpoint' else: raise ValueError('Currently unsupported topology') if ixs1 == ixs2: - if jys21 < nyinner - 1 and jys12 > nyinner: + if jys21 < ny_inner - 1 and jys12 > ny_inner: return 'connected-double-null' else: raise ValueError('Currently unsupported topology') @@ -316,21 +314,278 @@ def _check_connections(regions): + regions[region.connection_upper].connection_lower) +topologies = {} + + +def topology_disconnected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, + jys12, jys22, ny, ybndry): + regions = OrderedDict() + regions['lower_inner_PFR'] = Region( + name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_intersep', + connect_upper='lower_outer_PFR') + regions['lower_inner_intersep'] = Region( + name='lower_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_outer='lower_inner_SOL', connect_upper='inner_intersep') + regions['lower_inner_SOL'] = Region( + name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=0, 
yupper_ind=jys11 + 1, connect_inner='lower_inner_intersep', + connect_upper='inner_SOL') + regions['inner_core'] = Region( + name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, + connect_outer='inner_intersep', connect_lower='outer_core', + connect_upper='outer_core') + regions['inner_intersep'] = Region( + name='inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', + connect_outer='inner_SOL', connect_lower='lower_inner_intersep', + connect_upper='outer_intersep') + regions['inner_SOL'] = Region( + name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, + connect_inner='inner_intersep', connect_lower='lower_inner_SOL', + connect_upper='upper_inner_SOL') + regions['upper_inner_PFR'] = Region( + name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys21 + 1, yupper_ind=ny_inner, + connect_outer='upper_inner_intersep', connect_lower='upper_outer_PFR') + regions['upper_inner_intersep'] = Region( + name='upper_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=jys21 + 1, yupper_ind=ny_inner, + connect_inner='upper_inner_PFR', connect_outer='upper_inner_SOL', + connect_lower='upper_outer_intersep') + regions['upper_inner_SOL'] = Region( + name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys21 + 1, yupper_ind=ny_inner, + connect_inner='upper_inner_intersep', connect_lower='inner_SOL') + regions['upper_outer_PFR'] = Region( + name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=ny_inner, yupper_ind=jys12 + 1, + connect_outer='upper_outer_intersep', connect_upper='upper_inner_PFR') + regions['upper_outer_intersep'] = Region( + name='upper_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=ny_inner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_PFR', connect_outer='upper_outer_SOL', + 
connect_upper='upper_inner_intersep') + regions['upper_outer_SOL'] = Region( + name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=ny_inner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_intersep', connect_upper='outer_SOL') + regions['outer_core'] = Region( + name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, + connect_outer='outer_intersep', connect_lower='inner_core', + connect_upper='inner_core') + regions['outer_intersep'] = Region( + name='outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', + connect_outer='outer_SOL', connect_lower='inner_intersep', + connect_upper='lower_outer_intersep') + regions['outer_SOL'] = Region( + name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, + connect_inner='outer_intersep', connect_lower='upper_outer_SOL', + connect_upper='lower_outer_SOL') + regions['lower_outer_PFR'] = Region( + name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys22 + 1, yupper_ind=ny, + connect_outer='lower_outer_intersep', connect_lower='lower_inner_PFR') + regions['lower_outer_intersep'] = Region( + name='lower_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_outer='lower_outer_SOL', connect_lower='outer_intersep') + regions['lower_outer_SOL'] = Region( + name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys22 + 1, yupper_ind=ny, + connect_inner='lower_outer_intersep', connect_lower='outer_SOL') + return regions + + +topologies['disconnected-double-null'] = topology_disconnected_double_null + + +def topology_connected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, + jys22, ny, ybndry): + regions = OrderedDict() + regions['lower_inner_PFR'] = Region( + name='lower_inner_PFR', ds=ds, 
xinner_ind=0, xouter_ind=ixs1, + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', + connect_upper='lower_outer_PFR') + regions['lower_inner_SOL'] = Region( + name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_upper='inner_SOL') + regions['inner_core'] = Region( + name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_outer='inner_SOL', + connect_lower='outer_core', connect_upper='outer_core') + regions['inner_SOL'] = Region( + name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', + connect_lower='lower_inner_SOL', connect_upper='upper_inner_SOL') + regions['upper_inner_PFR'] = Region( + name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys21 + 1, yupper_ind=ny_inner, + connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') + regions['upper_inner_SOL'] = Region( + name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys21 + 1, yupper_ind=ny_inner, + connect_inner='upper_inner_PFR', connect_lower='inner_SOL') + regions['upper_outer_PFR'] = Region( + name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=ny_inner, yupper_ind=jys12 + 1, + connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') + regions['upper_outer_SOL'] = Region( + name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=ny_inner, yupper_ind=jys12 + 1, + connect_inner='upper_outer_PFR', connect_upper='outer_SOL') + regions['outer_core'] = Region( + name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_outer='outer_SOL', + connect_lower='inner_core', connect_upper='inner_core') + regions['outer_SOL'] = Region( + name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys12 + 1, 
yupper_ind=jys22 + 1, connect_inner='outer_core', + connect_lower='upper_outer_SOL', connect_upper='lower_outer_SOL') + regions['lower_outer_PFR'] = Region( + name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', + connect_lower='lower_inner_PFR') + regions['lower_outer_SOL'] = Region( + name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_lower='outer_SOL') + return regions + + +topologies['connected-double-null'] = topology_connected_double_null + + +def topology_single_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, + ny, ybndry): + regions = OrderedDict() + regions['inner_PFR'] = Region( + name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, + yupper_ind=jys11 + 1, connect_outer='inner_SOL', + connect_upper='outer_PFR') + regions['inner_SOL'] = Region( + name='inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, + yupper_ind=jys11 + 1, connect_inner='inner_PFR', connect_upper='SOL') + regions['core'] = Region( + name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, + yupper_ind=jys22 + 1, connect_outer='SOL', connect_lower='core', + connect_upper='core') + regions['SOL'] = Region( + name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=jys11 + 1, + yupper_ind=jys22 + 1, connect_inner='core', connect_lower='inner_SOL', + connect_upper='outer_SOL') + regions['outer_PFR'] = Region( + name='outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='outer_SOL', + connect_lower='inner_PFR') + regions['outer_SOL'] = Region( + name='outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='outer_PFR', + connect_lower='SOL') + return regions + + +topologies['single-null'] = topology_single_null + + +def topology_limiter(*, ds, ixs1, ixs2, 
nx, jys11, jys21, ny_inner, jys12, jys22, ny, + ybndry): + regions = OrderedDict() + regions['core'] = Region( + name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ybndry, + yupper_ind=ny - ybndry, connect_outer='SOL', connect_lower='core', + connect_upper='core') + regions['SOL'] = Region( + name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, + yupper_ind=ny, connect_inner='core') + return regions + + +topologies['limiter'] = topology_limiter + + +def topology_core(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, + ybndry): + regions = OrderedDict() + regions['core'] = Region( + name='core', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=ybndry, + yupper_ind=ny - ybndry, connect_lower='core', connect_upper='core') + return regions + + +topologies['core'] = topology_core + + +def topology_sol(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, + ybndry): + regions = OrderedDict() + regions['SOL'] = Region( + name='SOL', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=0, + yupper_ind=ny) + return regions + + +topologies['sol'] = topology_sol + + +def topology_xpoint(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, + ybndry): + regions = OrderedDict() + regions['lower_inner_PFR'] = Region( + name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', + connect_upper='lower_outer_PFR') + regions['lower_inner_SOL'] = Region( + name='lower_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', + connect_upper='upper_inner_SOL') + regions['upper_inner_PFR'] = Region( + name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys11 + 1, yupper_ind=ny_inner, + connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') + regions['upper_inner_SOL'] = Region( + name='upper_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=jys11 + 1, 
yupper_ind=ny_inner, + connect_inner='upper_inner_PFR', connect_lower='lower_inner_SOL') + regions['upper_outer_PFR'] = Region( + name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=ny_inner, yupper_ind=jys22 + 1, + connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') + regions['upper_outer_SOL'] = Region( + name='upper_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=ny_inner, yupper_ind=jys22 + 1, + connect_inner='upper_outer_PFR', connect_upper='lower_outer_SOL') + regions['lower_outer_PFR'] = Region( + name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', + connect_lower='lower_inner_PFR') + regions['lower_outer_SOL'] = Region( + name='lower_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, + ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', + connect_lower='upper_outer_SOL') + return regions + + +topologies['xpoint'] = topology_xpoint + + def _create_regions_toroidal(ds): topology = _get_topology(ds) - coordinates = {'t': ds.metadata.get('bout_tdim', None), - 'x': ds.metadata.get('bout_xdim', None), - 'y': ds.metadata.get('bout_ydim', None), - 'z': ds.metadata.get('bout_zdim', None)} - ixs1 = ds.metadata['ixseps1'] ixs2 = ds.metadata['ixseps2'] nx = ds.metadata['nx'] jys11 = ds.metadata['jyseps1_1'] jys21 = ds.metadata['jyseps2_1'] - nyinner = ds.metadata['ny_inner'] + ny_inner = ds.metadata['ny_inner'] jys12 = ds.metadata['jyseps1_2'] jys22 = ds.metadata['jyseps2_2'] ny = ds.metadata['ny'] @@ -353,7 +608,7 @@ def _create_regions_toroidal(ds): jys21 = _in_range(jys21, 0, ny - 1) jys12 = _in_range(jys12, 0, ny - 1) jys21, jys12 = _order_vars(jys21, jys12) - nyinner = _in_range(nyinner, jys21 + 1, jys12 + 1) + ny_inner = _in_range(ny_inner, jys21 + 1, jys12 + 1) jys22 = _in_range(jys22, 0, ny - 1) # Adjust for boundary cells @@ -364,226 +619,21 @@ def _create_regions_toroidal(ds): nx -= 2*mxg jys11 += ybndry 
jys21 += ybndry - nyinner += ybndry + ybndry_upper + ny_inner += ybndry + ybndry_upper jys12 += ybndry + 2*ybndry_upper jys22 += ybndry + 2*ybndry_upper ny += 2*ybndry + 2*ybndry_upper # Note, include guard cells in the created regions, fill them later - regions = OrderedDict() - if topology == 'disconnected-double-null': - regions['lower_inner_PFR'] = Region( - name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_intersep', - connect_upper='lower_outer_PFR') - regions['lower_inner_intersep'] = Region( - name='lower_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_outer='lower_inner_SOL', connect_upper='inner_intersep') - regions['lower_inner_SOL'] = Region( - name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_intersep', - connect_upper='inner_SOL') - regions['inner_core'] = Region( - name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, - connect_outer='inner_intersep', connect_lower='outer_core', - connect_upper='outer_core') - regions['inner_intersep'] = Region( - name='inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', - connect_outer='inner_SOL', connect_lower='lower_inner_intersep', - connect_upper='outer_intersep') - regions['inner_SOL'] = Region( - name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, - connect_inner='inner_intersep', connect_lower='lower_inner_SOL', - connect_upper='upper_inner_SOL') - regions['upper_inner_PFR'] = Region( - name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys21 + 1, yupper_ind=nyinner, - connect_outer='upper_inner_intersep', connect_lower='upper_outer_PFR') - 
regions['upper_inner_intersep'] = Region( - name='upper_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys21 + 1, yupper_ind=nyinner, - connect_inner='upper_inner_PFR', connect_outer='upper_inner_SOL', - connect_lower='upper_outer_intersep') - regions['upper_inner_SOL'] = Region( - name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys21 + 1, yupper_ind=nyinner, - connect_inner='upper_inner_intersep', connect_lower='inner_SOL') - regions['upper_outer_PFR'] = Region( - name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys12 + 1, - connect_outer='upper_outer_intersep', connect_upper='upper_inner_PFR') - regions['upper_outer_intersep'] = Region( - name='upper_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=nyinner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_PFR', connect_outer='upper_outer_SOL', - connect_upper='upper_inner_intersep') - regions['upper_outer_SOL'] = Region( - name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_intersep', connect_upper='outer_SOL') - regions['outer_core'] = Region( - name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, - connect_outer='outer_intersep', connect_lower='inner_core', - connect_upper='inner_core') - regions['outer_intersep'] = Region( - name='outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', - connect_outer='outer_SOL', connect_lower='inner_intersep', - connect_upper='lower_outer_intersep') - regions['outer_SOL'] = Region( - name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, - connect_inner='outer_intersep', connect_lower='upper_outer_SOL', - connect_upper='lower_outer_SOL') - regions['lower_outer_PFR'] = Region( - name='lower_outer_PFR', 
ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, - connect_outer='lower_outer_intersep', connect_lower='lower_inner_PFR') - regions['lower_outer_intersep'] = Region( - name='lower_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_outer='lower_outer_SOL', connect_lower='outer_intersep') - regions['lower_outer_SOL'] = Region( - name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, - connect_inner='lower_outer_intersep', connect_lower='outer_SOL') - _check_connections(regions) - elif topology == 'connected-double-null': - regions['lower_inner_PFR'] = Region( - name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', - connect_upper='lower_outer_PFR') - regions['lower_inner_SOL'] = Region( - name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_upper='inner_SOL') - regions['inner_core'] = Region( - name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_outer='inner_SOL', - connect_lower='outer_core', connect_upper='outer_core') - regions['inner_SOL'] = Region( - name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', - connect_lower='lower_inner_SOL', connect_upper='upper_inner_SOL') - regions['upper_inner_PFR'] = Region( - name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys21 + 1, yupper_ind=nyinner, - connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') - regions['upper_inner_SOL'] = Region( - name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys21 + 1, yupper_ind=nyinner, - connect_inner='upper_inner_PFR', connect_lower='inner_SOL') - 
regions['upper_outer_PFR'] = Region( - name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys12 + 1, - connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') - regions['upper_outer_SOL'] = Region( - name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_PFR', connect_upper='outer_SOL') - regions['outer_core'] = Region( - name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_outer='outer_SOL', - connect_lower='inner_core', connect_upper='inner_core') - regions['outer_SOL'] = Region( - name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', - connect_lower='upper_outer_SOL', connect_upper='lower_outer_SOL') - regions['lower_outer_PFR'] = Region( - name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', - connect_lower='lower_inner_PFR') - regions['lower_outer_SOL'] = Region( - name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_lower='outer_SOL') - _check_connections(regions) - elif topology == 'single-null': - regions['inner_PFR'] = Region( - name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, - yupper_ind=jys11 + 1, connect_outer='inner_SOL', - connect_upper='outer_PFR') - regions['inner_SOL'] = Region( - name='inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=jys11 + 1, connect_inner='inner_PFR', connect_upper='SOL') - regions['core'] = Region( - name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1, connect_outer='SOL', connect_lower='core', - connect_upper='core') - regions['SOL'] = Region( - name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, 
ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1, connect_inner='core', connect_lower='inner_SOL', - connect_upper='outer_SOL') - regions['outer_PFR'] = Region( - name='outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='outer_SOL', - connect_lower='inner_PFR') - regions['outer_SOL'] = Region( - name='outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='outer_PFR', - connect_lower='SOL') - _check_connections(regions) - elif topology == 'limiter': - regions['core'] = Region( - name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ybndry, - yupper_ind=ny - ybndry, connect_outer='SOL', connect_lower='core', - connect_upper='core') - regions['SOL'] = Region( - name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=ny, connect_inner='core') - _check_connections(regions) - elif topology == 'core': - regions['core'] = Region( - name='core', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=ybndry, - yupper_ind=ny - ybndry, connect_lower='core', connect_upper='core') - _check_connections(regions) - elif topology == 'sol': - regions['SOL'] = Region( - name='SOL', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=0, - yupper_ind=ny) - _check_connections(regions) - elif topology == 'xpoint': - regions['lower_inner_PFR'] = Region( - name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', - connect_upper='lower_outer_PFR') - regions['lower_inner_SOL'] = Region( - name='lower_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_upper='upper_inner_SOL') - regions['upper_inner_PFR'] = Region( - name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=nyinner, - connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') - regions['upper_inner_SOL'] = Region( - 
name='upper_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=nyinner, - connect_inner='upper_inner_PFR', connect_lower='lower_inner_SOL') - regions['upper_outer_PFR'] = Region( - name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=nyinner, yupper_ind=jys22 + 1, - connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') - regions['upper_outer_SOL'] = Region( - name='upper_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=nyinner, yupper_ind=jys22 + 1, - connect_inner='upper_outer_PFR', connect_upper='lower_outer_SOL') - regions['lower_outer_PFR'] = Region( - name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', - connect_lower='lower_inner_PFR') - regions['lower_outer_SOL'] = Region( - name='lower_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_lower='upper_outer_SOL') - _check_connections(regions) - else: + try: + regions = topologies[topology](ds=ds, ixs1=ixs1, ixs2=ixs2, nx=nx, jys11=jys11, + jys21=jys21, ny_inner=ny_inner, jys12=jys12, + jys22=jys22, ny=ny, ybndry=ybndry) + except KeyError: raise NotImplementedError("Topology '" + topology + "' is not implemented") + _check_connections(regions) + ds = _set_attrs_on_all_vars(ds, 'regions', regions) return ds From ffb5bd5e9d43bca9f39ba2f4cfa49c2175e84887 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 19:03:56 +0000 Subject: [PATCH 48/94] Just use dict, not OrderedDict, in region.py There is no need to use OrderedDict. 
--- xbout/region.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/xbout/region.py b/xbout/region.py index d5dc77c6..13fca530 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from .utils import _set_attrs_on_all_vars @@ -319,7 +317,7 @@ def _check_connections(regions): def topology_disconnected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_intersep', @@ -408,7 +406,7 @@ def topology_disconnected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_in def topology_connected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', @@ -465,7 +463,7 @@ def topology_connected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner def topology_single_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['inner_PFR'] = Region( name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='inner_SOL', @@ -497,7 +495,7 @@ def topology_single_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, j def topology_limiter(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ybndry, yupper_ind=ny - ybndry, connect_outer='SOL', connect_lower='core', @@ -513,7 +511,7 @@ def topology_limiter(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22 
def topology_core(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=ybndry, yupper_ind=ny - ybndry, connect_lower='core', connect_upper='core') @@ -525,7 +523,7 @@ def topology_core(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, n def topology_sol(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=0, yupper_ind=ny) @@ -537,7 +535,7 @@ def topology_sol(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny def topology_xpoint(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, ny, ybndry): - regions = OrderedDict() + regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', From 8830487e7672c24187279bde858117fa0c99db63 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 20:08:48 +0000 Subject: [PATCH 49/94] Use more f-strings Neater than using str() and joining strings with +. 
--- xbout/geometries.py | 7 +++---- xbout/region.py | 34 +++++++++++++++++----------------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index e93cd9b7..12cc1ebc 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -110,10 +110,9 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): z1 = z0 + nz*updated_ds.metadata['dz'] if not np.isclose(z1, 2.*np.pi*updated_ds.metadata['ZMAX'], rtol=1.e-15, atol=0.): - warn('Size of toroidal domain as calculated from nz*dz (' + str(z1 - z0) - + ' is not the same as 2pi*(ZMAX - ZMIN) (' - + str(2.*np.pi*updated_ds.metadata['ZMAX'] - z0) - + '): using value from dz') + warn(f"Size of toroidal domain as calculated from nz*dz ({str(z1 - z0)}" + f" is not the same as 2pi*(ZMAX - ZMIN) (" + f"{2.*np.pi*updated_ds.metadata['ZMAX'] - z0}): using value from dz") z = xr.DataArray(np.linspace(start=z0, stop=z1, num=nz, endpoint=False), dims=zcoord) updated_ds = updated_ds.assign_coords(**{zcoord: z}) diff --git a/xbout/region.py b/xbout/region.py index 13fca530..78781fe1 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -285,31 +285,31 @@ def _check_connections(regions): if region.connection_inner is not None: if regions[region.connection_inner].connection_outer != region.name: raise ValueError( - 'Inner connection of ' + region.name + ' is ' - + region.connection_inner + ', but outer connection of ' - + region.connection_inner + ' is ' - + regions[region.connection_inner].connection_outer) + f'Inner connection of {region.name} is ' + f'{region.connection_inner}, but outer connection of ' + f'{region.connection_inner} is ' + f'{regions[region.connection_inner].connection_outer}') if region.connection_outer is not None: if regions[region.connection_outer].connection_inner != region.name: raise ValueError( - 'Inner connection of ' + region.name + ' is ' - + region.connection_outer + ', but inner connection of ' - + region.connection_outer + ' is ' - 
+ regions[region.connection_outer].connection_inner) + f'Inner connection of {region.name} is ' + f'{region.connection_outer}, but inner connection of ' + f'{region.connection_outer} is ' + f'{regions[region.connection_outer].connection_inner}') if region.connection_lower is not None: if regions[region.connection_lower].connection_upper != region.name: raise ValueError( - 'Inner connection of ' + region.name + ' is ' - + region.connection_lower + ', but upper connection of ' - + region.connection_lower + ' is ' - + regions[region.connection_lower].connection_upper) + f'Inner connection of {region.name} is ' + f'{region.connection_lower}, but upper connection of ' + f'{region.connection_lower} is ' + f'{regions[region.connection_lower].connection_upper}') if region.connection_upper is not None: if regions[region.connection_upper].connection_lower != region.name: raise ValueError( - 'Inner connection of ' + region.name + ' is ' - + region.connection_upper + ', but lower connection of ' - + region.connection_upper + ' is ' - + regions[region.connection_upper].connection_lower) + f'Inner connection of {region.name} is ' + f'{region.connection_upper}, but lower connection of ' + f'{region.connection_upper} is ' + f'{regions[region.connection_upper].connection_lower}') topologies = {} @@ -628,7 +628,7 @@ def _create_regions_toroidal(ds): jys21=jys21, ny_inner=ny_inner, jys12=jys12, jys22=jys22, ny=ny, ybndry=ybndry) except KeyError: - raise NotImplementedError("Topology '" + topology + "' is not implemented") + raise NotImplementedError(f"Topology '{topology}' is not implemented") _check_connections(regions) From 4aab6fe1417c99a50ff2a3fdf5efda189e106c5d Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 22:04:01 +0000 Subject: [PATCH 50/94] Fix merge of 'regions' branch --- xbout/boutdataarray.py | 2 +- xbout/region.py | 1 + xbout/tests/test_boutdataset.py | 72 ++++++++++++++++----------------- 3 files changed, 38 insertions(+), 37 deletions(-) diff --git 
a/xbout/boutdataarray.py b/xbout/boutdataarray.py index ee599e55..c5443c49 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -227,7 +227,7 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, except KeyError: n = 8 - da = da.bout.fromRegion(region.name, with_guards={xcoord: 0, ycoord: 2}) + da = da.bout.from_region(region.name, with_guards={xcoord: 0, ycoord: 2}) da = da.chunk({ycoord: None}) ny_fine = n*region.ny diff --git a/xbout/region.py b/xbout/region.py index 1cef8a62..b7253162 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -102,6 +102,7 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, # particular regions, so do not need to be consistent between different # regions (e.g. core and PFR), so we are not forced to use just the index # value here. + dx = ds['dx'] dx_cumsum = dx.cumsum() self.xinner = dx_cumsum[xinner_ind] - dx[xinner_ind] self.xouter = dx_cumsum[xouter_ind - 1] + dx[xouter_ind - 1] diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index bf2a7e3e..9a548ddf 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -151,7 +151,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, for var in ['n', 'T']: v = ds[var] - v_lower_inner_PFR = v.bout.fromRegion('lower_inner_PFR') + v_lower_inner_PFR = v.bout.from_region('lower_inner_PFR') # Remove attributes that are expected to be different del v_lower_inner_PFR.attrs['region'] @@ -159,13 +159,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_lower_inner_PFR.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys22 + 1, jys22 + 1 + myg)).values, 
v_lower_inner_PFR.isel(theta=slice(-myg, None)).values) - v_lower_inner_intersep = v.bout.fromRegion('lower_inner_intersep') + v_lower_inner_intersep = v.bout.from_region('lower_inner_intersep') # Remove attributes that are expected to be different del v_lower_inner_intersep.attrs['region'] @@ -174,14 +174,14 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_lower_inner_intersep.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys11 + 1, jys11 + 1 + myg)).values, v_lower_inner_intersep.isel( theta=slice(-myg, None)).values) - v_lower_inner_SOL = v.bout.fromRegion('lower_inner_SOL') + v_lower_inner_SOL = v.bout.from_region('lower_inner_SOL') # Remove attributes that are expected to be different del v_lower_inner_SOL.attrs['region'] @@ -190,13 +190,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_lower_inner_SOL.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1, jys11 + 1 + myg)).values, v_lower_inner_SOL.isel(theta=slice(-myg, None)).values) - v_inner_core = v.bout.fromRegion('inner_core') + v_inner_core = v.bout.from_region('inner_core') # Remove attributes that are expected to be different del v_inner_core.attrs['region'] @@ -205,7 +205,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_inner_core.isel( theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 
'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys22 + 1 - myg, jys22 + 1)).values, @@ -214,7 +214,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys12 + 1, jys12 + 1 + myg)).values, v_inner_core.isel(theta=slice(-myg, None)).values) - v_inner_intersep = v.bout.fromRegion('inner_intersep') + v_inner_intersep = v.bout.from_region('inner_intersep') # Remove attributes that are expected to be different del v_inner_intersep.attrs['region'] @@ -223,7 +223,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_inner_intersep.isel( theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys11 + 1 - myg, jys11 + 1)).values, @@ -232,7 +232,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys12 + 1, jys12 + 1 + myg)).values, v_inner_intersep.isel(theta=slice(-myg, None)).values) - v_inner_sol = v.bout.fromRegion('inner_SOL') + v_inner_sol = v.bout.from_region('inner_SOL') # Remove attributes that are expected to be different del v_inner_sol.attrs['region'] @@ -240,7 +240,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1, jys21 + 1)), v_inner_sol.isel(theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys11 + 1 - myg, jys11 + 1)).values, @@ -249,7 +249,7 @@ def 
test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys21 + 1, jys21 + 1 + myg)).values, v_inner_sol.isel(theta=slice(-myg, None)).values) - v_upper_inner_PFR = v.bout.fromRegion('upper_inner_PFR') + v_upper_inner_PFR = v.bout.from_region('upper_inner_PFR') # Remove attributes that are expected to be different del v_upper_inner_PFR.attrs['region'] @@ -257,13 +257,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys21 + 1, ny_inner)), v_upper_inner_PFR.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys12 + 1 - myg, jys12 + 1)).values, v_upper_inner_PFR.isel(theta=slice(myg)).values) - v_upper_inner_intersep = v.bout.fromRegion('upper_inner_intersep') + v_upper_inner_intersep = v.bout.from_region('upper_inner_intersep') # Remove attributes that are expected to be different del v_upper_inner_intersep.attrs['region'] @@ -271,13 +271,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys21 + 1, ny_inner)), v_upper_inner_intersep.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys12 + 1 - myg, jys12 + 1)).values, v_upper_inner_intersep.isel(theta=slice(myg)).values) - v_upper_inner_SOL = v.bout.fromRegion('upper_inner_SOL') + v_upper_inner_SOL = v.bout.from_region('upper_inner_SOL') # Remove attributes that are expected to be different del v_upper_inner_SOL.attrs['region'] @@ -285,13 +285,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, 
theta=slice(jys21 + 1, ny_inner)), v_upper_inner_SOL.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys21 + 1 - myg, jys21 + 1)).values, v_upper_inner_SOL.isel(theta=slice(myg)).values) - v_upper_outer_PFR = v.bout.fromRegion('upper_outer_PFR') + v_upper_outer_PFR = v.bout.from_region('upper_outer_PFR') # Remove attributes that are expected to be different del v_upper_outer_PFR.attrs['region'] @@ -300,13 +300,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_upper_outer_PFR.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys21 + 1, jys21 + 1 + myg)).values, v_upper_outer_PFR.isel(theta=slice(-myg, None)).values) - v_upper_outer_intersep = v.bout.fromRegion('upper_outer_intersep') + v_upper_outer_intersep = v.bout.from_region('upper_outer_intersep') # Remove attributes that are expected to be different del v_upper_outer_intersep.attrs['region'] @@ -315,14 +315,14 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_upper_outer_intersep.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys21 + 1, jys21 + 1 + myg)).values, v_upper_outer_intersep.isel( theta=slice(-myg, None)).values) - v_upper_outer_SOL = v.bout.fromRegion('upper_outer_SOL') + v_upper_outer_SOL = 
v.bout.from_region('upper_outer_SOL') # Remove attributes that are expected to be different del v_upper_outer_SOL.attrs['region'] @@ -331,13 +331,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_upper_outer_SOL.isel( theta=slice(-myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys12 + 1, jys12 + 1 + myg)).values, v_upper_outer_SOL.isel(theta=slice(-myg, None)).values) - v_outer_core = v.bout.fromRegion('outer_core') + v_outer_core = v.bout.from_region('outer_core') # Remove attributes that are expected to be different del v_outer_core.attrs['region'] @@ -346,7 +346,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_outer_core.isel( theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys21 + 1 - myg, jys21 + 1)).values, @@ -355,7 +355,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys11 + 1, jys11 + 1 + myg)).values, v_outer_core.isel(theta=slice(-myg, None)).values) - v_outer_intersep = v.bout.fromRegion('outer_intersep') + v_outer_intersep = v.bout.from_region('outer_intersep') # Remove attributes that are expected to be different del v_outer_intersep.attrs['region'] @@ -364,7 +364,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v_outer_intersep.isel( theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates 
are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys21 + 1 - myg, jys21 + 1)).values, @@ -373,7 +373,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1, jys22 + 1 + myg)).values, v_outer_intersep.isel(theta=slice(-myg, None)).values) - v_outer_sol = v.bout.fromRegion('outer_SOL') + v_outer_sol = v.bout.from_region('outer_SOL') # Remove attributes that are expected to be different del v_outer_sol.attrs['region'] @@ -381,7 +381,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys12 + 1, jys22 + 1)), v_outer_sol.isel(theta=slice(myg, -myg if myg != 0 else None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys12 + 1 - myg, jys12 + 1)).values, @@ -390,7 +390,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1, jys22 + 1 + myg)).values, v_outer_sol.isel(theta=slice(-myg, None)).values) - v_lower_outer_PFR = v.bout.fromRegion('lower_outer_PFR') + v_lower_outer_PFR = v.bout.from_region('lower_outer_PFR') # Remove attributes that are expected to be different del v_lower_outer_PFR.attrs['region'] @@ -398,13 +398,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1, None)), v_lower_outer_PFR.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 + mxg), theta=slice(jys11 + 1 - myg, jys11 + 1)).values, v_lower_outer_PFR.isel(theta=slice(myg)).values) - 
v_lower_outer_intersep = v.bout.fromRegion('lower_outer_intersep') + v_lower_outer_intersep = v.bout.from_region('lower_outer_intersep') # Remove attributes that are expected to be different del v_lower_outer_intersep.attrs['region'] @@ -412,13 +412,13 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1, None)), v_lower_outer_intersep.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs1 - mxg, ixs2 + mxg), theta=slice(jys22 + 1 - myg, jys22 + 1)).values, v_lower_outer_intersep.isel(theta=slice(myg)).values) - v_lower_outer_SOL = v.bout.fromRegion('lower_outer_SOL') + v_lower_outer_SOL = v.bout.from_region('lower_outer_SOL') # Remove attributes that are expected to be different del v_lower_outer_SOL.attrs['region'] @@ -426,7 +426,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1, None)), v_lower_outer_SOL.isel(theta=slice(myg, None))) if myg > 0: - # check y-guards, which were 'communicated' by fromRegion + # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values npt.assert_equal(v.isel(x=slice(ixs2 - mxg, None), theta=slice(jys22 + 1 - myg, jys22 + 1)).values, From bd7a8240dfbec618385e6be2b7c13287e49c9674 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 22:47:13 +0000 Subject: [PATCH 51/94] Rename highParallelResRegion and highParallelRes to interpolate_parallel Follow PEP8 naming convention. interpolate_parallel() returns a result in a certain region if given a region argument (replacing highParallelResRegion), and otherwise combines the results from all regions (replacing highParallelRes). 
--- xbout/boutdataarray.py | 114 +++++++++++++----------------- xbout/boutdataset.py | 20 +++--- xbout/tests/test_boutdataarray.py | 48 +++++++------ 3 files changed, 86 insertions(+), 96 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index c5443c49..e08fac47 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -183,31 +183,71 @@ def from_region(self, name, with_guards=None): return da - def highParallelResRegion(self, region, n=None, toroidal_points=None, - method='cubic'): + def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, + method='cubic', return_dataset=False): """ Interpolate in the parallel direction to get a higher resolution version of the - variable in a certain region + variable. Parameters ---------- - region : str - The region to calculate the output in + region : str, optional + By default, return a result with all regions interpolated separately and then + combined. If an explicit region argument is passed, then return the variable + from only that region. n : int, optional The factor to increase the resolution by. Defaults to the value set by BoutDataset.setupParallelInterp(), or 10 if that has not been called. toroidal_points : int or sequence of int, optional If int, number of toroidal points to output, applies a stride to toroidal - direction to save memory usage. It is not always possible to get a particular - number of output points with a constant stride, so the number of outputs will - be only less than or equal to toroidal_points. If sequence of int, the - indexes of toroidal points for the output. + direction to save memory usage. If sequence of int, the indexes of toroidal + points for the output. method : str, optional The interpolation method to use. Options from xarray.DataArray.interp(), currently: linear, nearest, zero, slinear, quadratic, cubic. Default is 'cubic'. 
+ return_dataset : bool, optional + If this is set to True, return a Dataset containing this variable as a member + (by default returns a DataArray). Only used when region=None. + + Returns + ------- + A new DataArray containing a high-resolution version of the variable. (If + return_dataset=True, instead returns a Dataset containing the DataArray.) """ + if region is None: + # Call the single-region version of this method for each region, and combine + # the results together + parts = [ + self.interpolate_parallel(region, n=n, toroidal_points=toroidal_points, + method=method).bout.to_dataset() + for region in self.data.regions] + + result = xr.combine_by_coords(parts) + result.attrs = parts[0].attrs + # xr.combine_by_coords does not keep attrs at the moment. See + # https://github.com/pydata/xarray/issues/3865 For now just copy the attrs + # from the first region. Can remove this workaround when the xarray issue is + # fixed. Should be able to use instead of the above just: + # result = xr.combine_by_coords( + # [self.interpolate_parallel(region, n=n, toroidal_points=toroidal_points, + # method=method).bout.to_dataset()] + # ) + + # result has all regions, so should not have a region attribute + if 'region' in result.attrs: + del result.attrs['region'] + if 'region' in result[self.data.name].attrs: + del result[self.data.name].attrs['region'] + + if return_dataset: + return result + else: + # Extract the DataArray to return + return result[self.data.name] + + # Select a particular 'region' and interpolate to higher parallel resolution da = self.data region = da.regions[region] tcoord = da.metadata['bout_tdim'] @@ -285,62 +325,6 @@ def highParallelResRegion(self, region, n=None, toroidal_points=None, return da - def highParallelRes(self, return_dataset=False, **kwargs): - """ - Interpolate in the parallel direction to get a higher resolution version of the - variable. - - Parameters - ---------- - n : int, optional - The factor to increase the resolution by. 
Defaults to the value set by - BoutDataset.setupParallelInterp(), or 10 if that has not been called. - toroidal_points : int or sequence of int, optional - If int, number of toroidal points to output, applies a stride to toroidal - direction to save memory usage. If sequence of int, the indexes of toroidal - points for the output. - method : str, optional - The interpolation method to use. Options from xarray.DataArray.interp(), - currently: linear, nearest, zero, slinear, quadratic, cubic. Default is - 'cubic'. - return_dataset : bool, optional - If this is set to True, return a Dataset containing this variable as a member - (by default returns a DataArray) - - Returns - ------- - A new DataArray containing a high-resolution version of the variable. (If - return_dataset=True, instead returns a Dataset containing the DataArray.) - """ - - parts = [self.highParallelResRegion(region, **kwargs).bout.to_dataset() - for region in self.data.regions] - - result = xr.combine_by_coords(parts) - result.attrs = parts[0].attrs - # xr.combine_by_coords does not keep attrs at the moment. See - # https://github.com/pydata/xarray/issues/3865 - # For now just copy the attrs from the first region. Can remove this workaround - # when the xarray issue is fixed. 
Should be able to use instead of the above - # just: - # result = xr.combine_by_coords( - # [self.highParallelResRegion(region, **kwargs).bout.to_dataset() - # for region in self.data.regions] - # ) - - # result has all regions, so should not have a region attribute - if 'region' in result.attrs: - del result.attrs['region'] - if 'region' in result[self.data.name].attrs: - del result[self.data.name].attrs['region'] - - if return_dataset: - return result - else: - # Extract the DataArray to return - return result[self.data.name] - - def animate2D(self, animate_over='t', x=None, y=None, animate=True, fps=10, save_as=None, ax=None, poloidal_plot=False, logscale=None, **kwargs): """ diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 0f20ffcd..abd87c52 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -123,23 +123,25 @@ def getHighParallelResVars(self, variables, **kwargs): Dataset is a valid BoutDataset, although containing only the specified variables. """ if isinstance(variables, str): - ds = self.data[variables].bout.highParallelRes(return_dataset=True, **kwargs) + ds = self.data[variables].bout.interpolate_parallel(return_dataset=True, + **kwargs) else: # Need to start with a Dataset with attrs as merge() drops the attrs of the # passed-in argument. 
- ds = self.data[variables[0]].bout.highParallelRes(return_dataset=True, - **kwargs) + ds = self.data[variables[0]].bout.interpolate_parallel(return_dataset=True, + **kwargs) for var in variables[1:]: - ds = ds.merge(self.data[var].bout.highParallelRes(return_dataset=True, - **kwargs)) + ds = ds.merge( + self.data[var].bout.interpolate_parallel(return_dataset=True, + **kwargs) + ) # Add extra variables needed to make this a valid Dataset - ds['dx'] = self.data['dx'].bout.highParallelRes(return_dataset=True, - **kwargs)['dx'] + ds['dx'] = self.data['dx'].bout.interpolate_parallel(**kwargs) # dy needs to be compatible with the new poloidal coordinate - # dy was created as a coordinate in BoutDataArray.highParallelResRegion, here - # just need to demote back to a regular variable. + # dy was created as a coordinate in BoutDataArray.interpolate_parallel, here just + # need to demote back to a regular variable. ds = ds.reset_coords('dy') # Apply geometry diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 0890dd40..51fee055 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -130,7 +130,8 @@ def test_fromFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): npt.assert_allclose(n_nal[t, 1, 3, z].values, 1000.*t + 100.*1 + 10.*3. + (z - 7) % nz, rtol=1.e-15, atol=0.) 
# noqa: E501 @pytest.mark.long - def test_highParallelResRegion_core(self, tmpdir_factory, bout_xyt_example_files): + def test_interpolate_parallel_region_core(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y': 2}, topology='core') @@ -158,7 +159,7 @@ def f(t): n.data = f(theta).broadcast_like(n) - n_highres = n.bout.highParallelResRegion('core') + n_highres = n.bout.interpolate_parallel('core') expected = f(theta_fine).broadcast_like(n_highres) @@ -168,8 +169,9 @@ def f(t): 3, pytest.param(7, marks=pytest.mark.long), pytest.param(18, marks=pytest.mark.long)]) - def test_highParallelResRegion_core_change_n(self, tmpdir_factory, - bout_xyt_example_files, res_factor): + def test_interpolate_parallel_region_core_change_n(self, tmpdir_factory, + bout_xyt_example_files, + res_factor): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y': 2}, topology='core') @@ -198,14 +200,15 @@ def f(t): n.data = f(theta).broadcast_like(n) - n_highres = n.bout.highParallelResRegion('core', n=res_factor) + n_highres = n.bout.interpolate_parallel('core', n=res_factor) expected = f(theta_fine).broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) @pytest.mark.long - def test_highParallelResRegion_sol(self, tmpdir_factory, bout_xyt_example_files): + def test_interpolate_parallel_region_sol(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=1, nt=1, grid='grid', guards={'y': 2}, topology='sol') @@ -233,14 +236,14 @@ def f(t): n.data = f(theta).broadcast_like(n) - n_highres = n.bout.highParallelResRegion('SOL') + n_highres = n.bout.interpolate_parallel('SOL') expected = f(theta_fine).broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) - def 
test_highParallelResRegion_singlenull(self, tmpdir_factory, - bout_xyt_example_files): + def test_interpolate_parallel_region_singlenull(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') @@ -271,27 +274,27 @@ def f(t): f_fine = f(theta_fine)[:128] for region in ['inner_PFR', 'inner_SOL']: - n_highres = n.bout.highParallelResRegion(region).isel(theta=slice(2, None)) + n_highres = n.bout.interpolate_parallel(region).isel(theta=slice(2, None)) expected = f_fine.broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) for region in ['core', 'SOL']: - n_highres = n.bout.highParallelResRegion(region) + n_highres = n.bout.interpolate_parallel(region) expected = f_fine.broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) for region in ['outer_PFR', 'outer_SOL']: - n_highres = n.bout.highParallelResRegion(region).isel(theta=slice(-2)) + n_highres = n.bout.interpolate_parallel(region).isel(theta=slice(-2)) expected = f_fine.broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.e-2) - def test_highParallelRes(self, tmpdir_factory, bout_xyt_example_files): + def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') @@ -324,15 +327,15 @@ def f_y(t): f_fine = f_y(theta_fine)*(x + 1.) 
- n_highres = n.bout.highParallelRes().isel(theta=slice(2, -2)) + n_highres = n.bout.interpolate_parallel().isel(theta=slice(2, -2)) expected = f_fine.broadcast_like(n_highres) npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.1e-2) - def test_highParallelRes_toroidal_points(self, tmpdir_factory, - bout_xyt_example_files): + def test_interpolate_parallel_toroidal_points(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') @@ -341,14 +344,14 @@ def test_highParallelRes_toroidal_points(self, tmpdir_factory, gridfilepath=Path(path).parent.joinpath('grid.nc'), geometry='toroidal', keep_yboundaries=True) - n_highres = ds['n'].bout.highParallelRes() + n_highres = ds['n'].bout.interpolate_parallel() - n_highres_truncated = ds['n'].bout.highParallelRes(toroidal_points=2) + n_highres_truncated = ds['n'].bout.interpolate_parallel(toroidal_points=2) xrt.assert_identical(n_highres_truncated, n_highres.isel(zeta=[0, 2])) - def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, - bout_xyt_example_files): + def test_interpolate_parallel_toroidal_points_list(self, tmpdir_factory, + bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, nype=3, nt=1, grid='grid', guards={'y': 2}, topology='single-null') @@ -357,10 +360,11 @@ def test_highParallelRes_toroidal_points_list(self, tmpdir_factory, gridfilepath=Path(path).parent.joinpath('grid.nc'), geometry='toroidal', keep_yboundaries=True) - n_highres = ds['n'].bout.highParallelRes() + n_highres = ds['n'].bout.interpolate_parallel() points_list = [1, 2] - n_highres_truncated = ds['n'].bout.highParallelRes(toroidal_points=points_list) + n_highres_truncated = ds['n'].bout.interpolate_parallel( + toroidal_points=points_list) xrt.assert_identical(n_highres_truncated, n_highres.isel(zeta=points_list)) From 
f9cdbda0f87c3a0b1588d18c1400a910c761743c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 22:58:28 +0000 Subject: [PATCH 52/94] Rename resetParallelInterpFactor to set_parallel_interpolation_factor ...to follow PEP8 naming convention. --- xbout/boutdataset.py | 2 +- xbout/tests/test_boutdataset.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index abd87c52..fba498bf 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -79,7 +79,7 @@ def getFieldAligned(self, name, caching=True): self.data[aligned_name] = self.data[name].bout.toFieldAligned() return self.data[aligned_name] - def resetParallelInterpFactor(self, n): + def set_parallel_interpolation_factor(self, n): """ Set the default factor to increase resolution when doing parallel interpolation. diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 9a548ddf..e43b4150 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -85,7 +85,7 @@ def test_getFieldAligned(self, tmpdir_factory, bout_xyt_example_files): ds['n_aligned'] = ds['T'] xrt.assert_allclose(ds.bout.getFieldAligned('n'), ds['T']) - def test_resetParallelInterpFactor(self): + def test_set_parallel_interpolation_factor(self): ds = Dataset() ds['a'] = DataArray() ds = _set_attrs_on_all_vars(ds, 'metadata', {}) @@ -95,7 +95,7 @@ def test_resetParallelInterpFactor(self): with pytest.raises(KeyError): ds['a'].metadata['fine_interpolation_factor'] - ds.bout.resetParallelInterpFactor(42) + ds.bout.set_parallel_interpolation_factor(42) assert ds.metadata['fine_interpolation_factor'] == 42 assert ds['a'].metadata['fine_interpolation_factor'] == 42 From c5686c90e2a00d05029a35017d22dc4330ae0278 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 23:04:26 +0000 Subject: [PATCH 53/94] Better test for whether ds has a 'geometry' attribute Using hasattr much more robust than checking exception 
message. --- xbout/boutdataset.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index fba498bf..245acb8a 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -145,13 +145,10 @@ def getHighParallelResVars(self, variables, **kwargs): ds = ds.reset_coords('dy') # Apply geometry - try: + if hasattr(ds, 'geometry'): ds = apply_geometry(ds, ds.geometry) - except AttributeError as e: - # if no geometry was originally applied, then ds has no geometry attribute - # and we can continue without applying geometry here - if str(e) != "'Dataset' object has no attribute 'geometry'": - raise + # if no geometry was originally applied, then ds has no geometry attribute and we + # can continue without applying geometry here return ds From f3a652aff8cb682f338d7ec7b38d8e025ea72fb3 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 23:15:08 +0000 Subject: [PATCH 54/94] Rename getHighParallelResVars() to interpolate_parallel() ...to follow PEP8 naming convention. --- xbout/boutdataset.py | 10 +++++++--- xbout/tests/test_boutdataset.py | 6 +++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 245acb8a..fd458dc2 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -96,10 +96,14 @@ def set_parallel_interpolation_factor(self, n): return ds - def getHighParallelResVars(self, variables, **kwargs): + def interpolate_parallel(self, variables, **kwargs): """ - Interpolate in the parallel direction to get a higher resolution version of one - or more variables. + Interpolate in the parallel direction to get a higher resolution version of a + subset of variables. + + Note that the high-resolution variables are all loaded into memory, so most + likely it is necessary to select only a small number. The toroidal_points + argument can also be used to reduce the memory demand. 
Parameters ---------- diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index e43b4150..364e65ba 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -102,10 +102,10 @@ def test_set_parallel_interpolation_factor(self): @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) - def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, + def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files, guards, keep_xboundaries, keep_yboundaries): # This test checks that the regions created in the new high-resolution Dataset by - # getHighParallelResVars are correct. + # interpolate_parallel are correct. # This test does not test the accuracy of the parallel interpolation (there are # other tests for that). @@ -122,7 +122,7 @@ def test_getHighParallelResVars(self, tmpdir_factory, bout_xyt_example_files, keep_yboundaries=keep_yboundaries) # Get high parallel resolution version of ds, and check that - ds = ds.bout.getHighParallelResVars(('n', 'T')) + ds = ds.bout.interpolate_parallel(('n', 'T')) mxg = guards['x'] myg = guards['y'] From 71f03046ff15fe594766982ab8713833c74991a0 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 23:35:28 +0000 Subject: [PATCH 55/94] Remove accidentally-committed print statements --- xbout/region.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/xbout/region.py b/xbout/region.py index b7253162..e1b21baf 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -71,14 +71,12 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, ybndry = ds.metadata['MYG'] if self.connection_lower is None: self.ny -= ybndry - print('check ny 2', self.ny, self.connection_lower, connect_lower) # used to calculate y-coordinate of lower side (self.ylower) ylower_ind += ybndry if self.connection_upper is None: self.ny -= ybndry - print('check ny 3', self.ny, 
self.connection_upper, connect_upper) # used to calculate y-coordinate of upper side (self.yupper) yupper_ind -= ybndry From 0b65e1b358959aa0679f7872dcac7e5a62e5a6f7 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 23 Mar 2020 23:35:51 +0000 Subject: [PATCH 56/94] More f-strings --- xbout/tests/test_load.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 30555694..bf682d61 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -374,8 +374,7 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, ds['ny_inner'] = ny//2 elif topology == 'xpoint': if nype < 4: - raise ValueError('Not enough processors for xpoint topology: ' - + 'nype=' + str(nype)) + raise ValueError(f'Not enough processors for xpoint topology: nype={nype}') ds['ixseps1'] = nx//2 ds['ixseps2'] = nx//2 ds['jyseps1_1'] = MYSUB - 1 @@ -386,8 +385,7 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, ds['jyseps2_2'] = ny - MYSUB - 1 elif topology == 'single-null': if nype < 3: - raise ValueError('Not enough processors for single-null topology: ' - + 'nype=' + str(nype)) + raise ValueError(f'Not enough processors for single-null topology: nype={nype}') ds['ixseps1'] = nx//2 ds['ixseps2'] = nx ds['jyseps1_1'] = MYSUB - 1 @@ -398,7 +396,7 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, elif topology == 'connected-double-null': if nype < 6: raise ValueError('Not enough processors for connected-double-null topology: ' - + 'nype=' + str(nype)) + f'nype={nype}') ds['ixseps1'] = nx//2 ds['ixseps2'] = nx//2 ds['jyseps1_1'] = MYSUB - 1 @@ -410,12 +408,12 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, elif topology == 'disconnected-double-null': if nype < 6: raise ValueError('Not enough processors for disconnected-double-null ' - + 'topology: nype=' + str(nype)) + f'topology: 
nype={nype}') ds['ixseps1'] = nx//2 ds['ixseps2'] = nx//2 + 4 if ds['ixseps2'] >= nx: raise ValueError('Not enough points in the x-direction. ixseps2=' - + str(ds['ixseps2']) + ' > nx=' + str(nx)) + f'{ds["ixseps2"]} > nx={nx}') ds['jyseps1_1'] = MYSUB - 1 ny_inner = 3*MYSUB ds['ny_inner'] = ny_inner @@ -423,7 +421,7 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, ds['jyseps1_2'] = ny_inner + MYSUB - 1 ds['jyseps2_2'] = ny - MYSUB - 1 else: - raise ValueError('Unrecognised topology=' + str(topology)) + raise ValueError(f'Unrecognised topology={topology}') one = DataArray(np.ones((x_length, y_length)), dims=['x', 'y']) zero = DataArray(np.zeros((x_length, y_length)), dims=['x', 'y']) From 55b7fb99b5b713cbc4b6d3c2022f61d16f54306c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 09:46:40 +0000 Subject: [PATCH 57/94] Fix merge of test_toFieldAligned from regions --- xbout/tests/test_boutdataarray.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 8980aef7..8c51835f 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -55,28 +55,28 @@ def test_toFieldAligned(self, tmpdir_factory, bout_xyt_example_files, nz): n_al = n.bout.toFieldAligned() for t in range(ds.sizes['t']): for z in range(nz): - assert_allclose(n_al[t, 0, 0, z].values, 1000.*t + z % nz, rtol=1.e-15, atol=5.e-16) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 0, z].values, 1000.*t + z % nz, rtol=1.e-15, atol=5.e-16) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 0, 1, z].values, 1000.*t + 10.*1. + (z + 1) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 1, z].values, 1000.*t + 10.*1. + (z + 1) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 0, 2, z].values, 1000.*t + 10.*2. + (z + 2) % nz, rtol=1.e-15, atol=0.) 
# noqa: E501 + npt.assert_allclose(n_al[t, 0, 2, z].values, 1000.*t + 10.*2. + (z + 2) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 0, 3, z].values, 1000.*t + 10.*3. + (z + 3) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 0, 3, z].values, 1000.*t + 10.*3. + (z + 3) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 1, 0, z].values, 1000.*t + 100.*1 + 10.*0. + (z + 4) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 0, z].values, 1000.*t + 100.*1 + 10.*0. + (z + 4) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 1, 1, z].values, 1000.*t + 100.*1 + 10.*1. + (z + 5) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 1, z].values, 1000.*t + 100.*1 + 10.*1. + (z + 5) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 1, 2, z].values, 1000.*t + 100.*1 + 10.*2. + (z + 6) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 2, z].values, 1000.*t + 100.*1 + 10.*2. + (z + 6) % nz, rtol=1.e-15, atol=0.) # noqa: E501 for z in range(nz): - assert_allclose(n_al[t, 1, 3, z].values, 1000.*t + 100.*1 + 10.*3. + (z + 7) % nz, rtol=1.e-15, atol=0.) # noqa: E501 + npt.assert_allclose(n_al[t, 1, 3, z].values, 1000.*t + 100.*1 + 10.*3. + (z + 7) % nz, rtol=1.e-15, atol=0.) # noqa: E501 def test_toFieldAligned_dask(self, tmpdir_factory, bout_xyt_example_files): From 8c2ff03eea5dc062ea84efd587bc9be3aea2d50f Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 12:08:18 +0000 Subject: [PATCH 58/94] Add Ellipsis option for 'variables' arg BoutDataset.interpolate_parallel If 'variables=...' is passed, then interpolate all variables in the Dataset. 
--- xbout/boutdataset.py | 14 ++++++++++++-- xbout/tests/test_boutdataset.py | 20 ++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index fd458dc2..7a58a6bc 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -107,8 +107,9 @@ def interpolate_parallel(self, variables, **kwargs): Parameters ---------- - variables : str or sequence of str - The names of the variables to interpolate + variables : str or sequence of str or ... + The names of the variables to interpolate. If 'variables=...' is passed + explicitly, then interpolate all variables in the Dataset. n : int, optional The factor to increase the resolution by. Defaults to the value set by BoutDataset.setupParallelInterp(), or 10 if that has not been called. @@ -126,6 +127,15 @@ def interpolate_parallel(self, variables, **kwargs): A new Dataset containing a high-resolution versions of the variables. The new Dataset is a valid BoutDataset, although containing only the specified variables. """ + + if variables is ...: + variables = [v for v in self.data] + if 'dy' in variables: + # dy is treated specially, as it is converted to a coordinate, and then + # converted back again below, so must not call + # interpolate_parallel('dy'). + variables.remove('dy') + if isinstance(variables, str): ds = self.data[variables].bout.interpolate_parallel(return_dataset=True, **kwargs) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 364e65ba..8bae815f 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -432,6 +432,26 @@ def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files, theta=slice(jys22 + 1 - myg, jys22 + 1)).values, v_lower_outer_SOL.isel(theta=slice(myg)).values) + def test_interpolate_parallel_all_variables_arg(self, tmpdir_factory, + bout_xyt_example_files): + # Check that passing 'variables=...' 
to interpolate_parallel() does actually + # interpolate all the variables + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 4, 3), nxpe=1, + nype=1, nt=1, grid='grid', topology='sol') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal') + + # Get high parallel resolution version of ds, and check that + ds = ds.bout.interpolate_parallel(...) + + interpolated_variables = [v for v in ds] + + assert set(interpolated_variables) == set(('n', 'T', 'g11', 'g22', 'g33', 'g12', + 'g13', 'g23', 'g_11', 'g_22', 'g_33', 'g_12', 'g_13', 'g_23', 'G1', 'G2', + 'G3', 'J', 'Bxy', 'dx', 'dy')) + class TestLoadInputFile: @pytest.mark.skip From d06e38fd2b317334f1ab58d06f0dd80c2ee67d2f Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 13:15:36 +0000 Subject: [PATCH 59/94] Extra test for interpolate_parallel --- xbout/tests/test_boutdataarray.py | 40 +++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 8c51835f..27cd3b09 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -391,6 +391,46 @@ def f_y(t): npt.assert_allclose(n_highres.values, expected.values, rtol=0., atol=1.1e-2) + def test_interpolate_parallel_sol(self, tmpdir_factory, bout_xyt_example_files): + path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, + nype=1, nt=1, grid='grid', guards={'y': 2}, + topology='sol') + + ds = open_boutdataset(datapath=path, + gridfilepath=Path(path).parent.joinpath('grid.nc'), + geometry='toroidal', keep_yboundaries=True) + + n = ds['n'] + + thetalength = 2.*np.pi + + dtheta = thetalength/16. + theta = xr.DataArray(np.linspace(0. - 1.5*dtheta, thetalength + 1.5*dtheta, 20), + dims='theta') + + dtheta_fine = thetalength/128. + theta_fine = xr.DataArray( + np.linspace(0. 
+ 0.5*dtheta_fine, thetalength - 0.5*dtheta_fine, 128), + dims='theta') + x = xr.DataArray(np.arange(3), dims='x') + + def f_y(t): + t = np.sin(t) + return (t**3 - t**2 + t - 1.) + + f = f_y(theta) * (x + 1.) + + n.data = f.broadcast_like(n) + + f_fine = f_y(theta_fine)*(x + 1.) + + n_highres = n.bout.interpolate_parallel().isel(theta=slice(2, -2)) + + expected = f_fine.broadcast_like(n_highres) + + npt.assert_allclose(n_highres.values, expected.values, + rtol=0., atol=1.1e-2) + def test_interpolate_parallel_toroidal_points(self, tmpdir_factory, bout_xyt_example_files): path = bout_xyt_example_files(tmpdir_factory, lengths=(2, 3, 16, 3), nxpe=1, From 50af75326999fc70b6d291b7635f302be5c61c0c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 13:57:41 +0000 Subject: [PATCH 60/94] Fix update of jyseps to higher resolution If any of the jyseps* are less than or equal to zero, don't update because the update jyseps=n*(jyseps+1)-1 would make jyseps positive if it is zero to start with, which is not correct, because increasing the resolution does not increase the index of the lower boundary. 
--- xbout/utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/xbout/utils.py b/xbout/utils.py index 709dab24..771a3533 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -61,7 +61,9 @@ def _update_metadata_increased_resolution(da, n): da.attrs['metadata'] = deepcopy(da.metadata) def update_jyseps(name): - da.metadata[name] = n*(da.metadata[name] + 1) - 1 + # If any jyseps<=0, need to leave as is + if da.metadata[name] > 0: + da.metadata[name] = n*(da.metadata[name] + 1) - 1 update_jyseps('jyseps1_1') update_jyseps('jyseps2_1') update_jyseps('jyseps1_2') From df25fc00bfb5dd246d179ea57269cf43291e13ef Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 14:06:59 +0000 Subject: [PATCH 61/94] Add t_array as the t-coordinate in apply_geometry() --- xbout/geometries.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/xbout/geometries.py b/xbout/geometries.py index 12cc1ebc..00f40d89 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -76,9 +76,16 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): # long as these bounds are consistent with the global coordinates defined in # Region.__init__() (we will only use these coordinates for interpolation) and it is # simplest to calculate them with cumsum(). 
+ tcoord = updated_ds.metadata.get('bout_tdim', 't') xcoord = updated_ds.metadata.get('bout_xdim', 'x') ycoord = updated_ds.metadata.get('bout_ydim', 'y') zcoord = updated_ds.metadata.get('bout_zdim', 'z') + + if (tcoord not in ds.coords) and (tcoord in ds.dims): + # Create the time coordinate from t_array + updated_ds = updated_ds.rename({'t_array': tcoord}) + updated_ds = updated_ds.set_coords(tcoord) + if xcoord not in ds.coords: # Make index 'x' a coordinate, useful for handling global indexing # Note we have to use the index value, not the value calculated from 'dx' because From ac415887058811fa0af5d0ebbe256d98ddef3e1c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 14:31:31 +0000 Subject: [PATCH 62/94] PEP8 fixes --- xbout/tests/test_boutdataset.py | 2 +- xbout/tests/test_region.py | 33 +++++++++++++++++++++------------ 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 8bae815f..48d60d70 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -103,7 +103,7 @@ def test_set_parallel_interpolation_factor(self): @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files, - guards, keep_xboundaries, keep_yboundaries): + guards, keep_xboundaries, keep_yboundaries): # This test checks that the regions created in the new high-resolution Dataset by # interpolate_parallel are correct. 
# This test does not test the accuracy of the parallel interpolation (there are diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index a5008539..60fb2e41 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -1006,7 +1006,8 @@ def test_region_disconnecteddoublenull_get_one_guard( n = ds['n'] - n_lower_inner_PFR = n.bout.from_region('lower_inner_PFR', with_guards=with_guards) + n_lower_inner_PFR = n.bout.from_region('lower_inner_PFR', + with_guards=with_guards) # Remove attributes that are expected to be different del n_lower_inner_PFR.attrs['region'] @@ -1021,7 +1022,7 @@ def test_region_disconnecteddoublenull_get_one_guard( n_lower_inner_PFR.isel(theta=slice(-yguards, None)).values) n_lower_inner_intersep = n.bout.from_region('lower_inner_intersep', - with_guards=with_guards) + with_guards=with_guards) # Remove attributes that are expected to be different del n_lower_inner_intersep.attrs['region'] @@ -1037,7 +1038,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys11 + 1, jys11 + 1 + yguards)).values, n_lower_inner_intersep.isel(theta=slice(-yguards, None)).values) - n_lower_inner_SOL = n.bout.from_region('lower_inner_SOL', with_guards=with_guards) + n_lower_inner_SOL = n.bout.from_region('lower_inner_SOL', + with_guards=with_guards) # Remove attributes that are expected to be different del n_lower_inner_SOL.attrs['region'] @@ -1106,7 +1108,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys21 + 1, jys21 + 1 + yguards)).values, n_inner_sol.isel(theta=slice(-yguards, None)).values) - n_upper_inner_PFR = n.bout.from_region('upper_inner_PFR', with_guards=with_guards) + n_upper_inner_PFR = n.bout.from_region('upper_inner_PFR', + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_inner_PFR.attrs['region'] @@ -1135,7 +1138,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys12 + 1 - yguards, jys12 + 1)).values, 
n_upper_inner_intersep.isel(theta=slice(yguards)).values) - n_upper_inner_SOL = n.bout.from_region('upper_inner_SOL', with_guards=with_guards) + n_upper_inner_SOL = n.bout.from_region('upper_inner_SOL', + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_inner_SOL.attrs['region'] @@ -1149,7 +1153,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys21 + 1 - yguards, jys21 + 1)).values, n_upper_inner_SOL.isel(theta=slice(yguards)).values) - n_upper_outer_PFR = n.bout.from_region('upper_outer_PFR', with_guards=with_guards) + n_upper_outer_PFR = n.bout.from_region('upper_outer_PFR', + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_outer_PFR.attrs['region'] @@ -1181,7 +1186,8 @@ def test_region_disconnecteddoublenull_get_one_guard( n_upper_outer_intersep.isel( theta=slice(-yguards, None)).values) - n_upper_outer_SOL = n.bout.from_region('upper_outer_SOL', with_guards=with_guards) + n_upper_outer_SOL = n.bout.from_region('upper_outer_SOL', + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_outer_SOL.attrs['region'] @@ -1251,7 +1257,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys22 + 1, jys22 + 1 + yguards)).values, n_outer_sol.isel(theta=slice(-yguards, None)).values) - n_lower_outer_PFR = n.bout.from_region('lower_outer_PFR', with_guards=with_guards) + n_lower_outer_PFR = n.bout.from_region('lower_outer_PFR', + with_guards=with_guards) # Remove attributes that are expected to be different del n_lower_outer_PFR.attrs['region'] @@ -1280,7 +1287,8 @@ def test_region_disconnecteddoublenull_get_one_guard( theta=slice(jys22 + 1 - yguards, jys22 + 1)).values, n_lower_outer_intersep.isel(theta=slice(yguards)).values) - n_lower_outer_SOL = n.bout.from_region('lower_outer_SOL', with_guards=with_guards) + n_lower_outer_SOL = n.bout.from_region('lower_outer_SOL', + with_guards=with_guards) # Remove attributes 
that are expected to be different del n_lower_outer_SOL.attrs['region'] @@ -1290,6 +1298,7 @@ def test_region_disconnecteddoublenull_get_one_guard( if yguards > 0: # check y-guards, which were 'communicated' by from_region # Coordinates are not equal, so only compare array values - npt.assert_equal(n.isel(x=slice(ixs2 - xguards, None), - theta=slice(jys22 + 1 - yguards, jys22 + 1)).values, - n_lower_outer_SOL.isel(theta=slice(yguards)).values) + npt.assert_equal( + n.isel(x=slice(ixs2 - xguards, None), + theta=slice(jys22 + 1 - yguards, jys22 + 1)).values, + n_lower_outer_SOL.isel(theta=slice(yguards)).values) From 6a7a748d9cc87d789e3856666ab3f31afeb74739 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 24 Mar 2020 16:17:30 +0000 Subject: [PATCH 63/94] Update to incorporate variable and argument name changes from regions --- xbout/boutdataarray.py | 4 +- xbout/region.py | 219 +++++++++++++++++++++-------------------- 2 files changed, 114 insertions(+), 109 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 2405307f..c827b23b 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -278,11 +278,11 @@ def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, dy = (region.yupper - region.ylower)/ny_fine myg = da.metadata['MYG'] - if da.metadata['keep_yboundaries'] and region.connection_lower is None: + if da.metadata['keep_yboundaries'] and region.connection_lower_y is None: ybndry_lower = myg else: ybndry_lower = 0 - if da.metadata['keep_yboundaries'] and region.connection_upper is None: + if da.metadata['keep_yboundaries'] and region.connection_upper_y is None: ybndry_upper = myg else: ybndry_upper = 0 diff --git a/xbout/region.py b/xbout/region.py index 62b8ab86..fafb6068 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -56,13 +56,13 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, # self.xinner, self.xouter, self.ylower, self.yupper if ds.metadata['keep_xboundaries']: 
xbndry = ds.metadata['MXG'] - if self.connection_inner is None: + if self.connection_inner_x is None: self.nx -= xbndry # used to calculate x-coordinate of inner side (self.xinner) xinner_ind += xbndry - if self.connection_outer is None: + if self.connection_outer_x is None: self.nx -= xbndry # used to calculate x-coordinate of outer side (self.xouter) @@ -70,13 +70,13 @@ def __init__(self, *, name, ds=None, xinner_ind=None, xouter_ind=None, if ds.metadata['keep_yboundaries']: ybndry = ds.metadata['MYG'] - if self.connection_lower is None: + if self.connection_lower_y is None: self.ny -= ybndry # used to calculate y-coordinate of lower side (self.ylower) ylower_ind += ybndry - if self.connection_upper is None: + if self.connection_upper_y is None: self.ny -= ybndry # used to calculate y-coordinate of upper side (self.yupper) @@ -286,34 +286,34 @@ def _get_topology(ds): def _check_connections(regions): for region in regions.values(): - if region.connection_inner is not None: - if regions[region.connection_inner].connection_outer != region.name: + if region.connection_inner_x is not None: + if regions[region.connection_inner_x].connection_outer_x != region.name: raise ValueError( - f'Inner connection of {region.name} is ' - f'{region.connection_inner}, but outer connection of ' - f'{region.connection_inner} is ' - f'{regions[region.connection_inner].connection_outer}') - if region.connection_outer is not None: - if regions[region.connection_outer].connection_inner != region.name: + f'Inner-x connection of {region.name} is ' + f'{region.connection_inner_x}, but outer-x connection of ' + f'{region.connection_inner_x} is ' + f'{regions[region.connection_inner_x].connection_outer_x}') + if region.connection_outer_x is not None: + if regions[region.connection_outer_x].connection_inner_x != region.name: raise ValueError( - f'Inner connection of {region.name} is ' - f'{region.connection_outer}, but inner connection of ' - f'{region.connection_outer} is ' - 
f'{regions[region.connection_outer].connection_inner}') - if region.connection_lower is not None: - if regions[region.connection_lower].connection_upper != region.name: + f'Inner-x connection of {region.name} is ' + f'{region.connection_outer_x}, but inner-x connection of ' + f'{region.connection_outer_x} is ' + f'{regions[region.connection_outer_x].connection_inner_x}') + if region.connection_lower_y is not None: + if regions[region.connection_lower_y].connection_upper_y != region.name: raise ValueError( - f'Inner connection of {region.name} is ' - f'{region.connection_lower}, but upper connection of ' - f'{region.connection_lower} is ' - f'{regions[region.connection_lower].connection_upper}') - if region.connection_upper is not None: - if regions[region.connection_upper].connection_lower != region.name: + f'Lower-y connection of {region.name} is ' + f'{region.connection_lower_y}, but upper-y connection of ' + f'{region.connection_lower_y} is ' + f'{regions[region.connection_lower_y].connection_upper_y}') + if region.connection_upper_y is not None: + if regions[region.connection_upper_y].connection_lower_y != region.name: raise ValueError( - f'Inner connection of {region.name} is ' - f'{region.connection_upper}, but lower connection of ' - f'{region.connection_upper} is ' - f'{regions[region.connection_upper].connection_lower}') + f'Upper-y connection of {region.name} is ' + f'{region.connection_upper_y}, but lower-y connection of ' + f'{region.connection_upper_y} is ' + f'{regions[region.connection_upper_y].connection_lower_y}') topologies = {} @@ -324,84 +324,88 @@ def topology_disconnected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_in regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_intersep', - connect_upper='lower_outer_PFR') + ylower_ind=0, yupper_ind=jys11 + 1, + connection_outer_x='lower_inner_intersep', + 
connection_upper_y='lower_outer_PFR') regions['lower_inner_intersep'] = Region( name='lower_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_outer='lower_inner_SOL', connect_upper='inner_intersep') + ylower_ind=0, yupper_ind=jys11 + 1, connection_inner_x='lower_inner_PFR', + connection_outer_x='lower_inner_SOL', connection_upper_y='inner_intersep') regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_intersep', - connect_upper='inner_SOL') + ylower_ind=0, yupper_ind=jys11 + 1, + connection_inner_x='lower_inner_intersep', connection_upper_y='inner_SOL') regions['inner_core'] = Region( name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, - connect_outer='inner_intersep', connect_lower='outer_core', - connect_upper='outer_core') + connection_outer_x='inner_intersep', connection_lower_y='outer_core', + connection_upper_y='outer_core') regions['inner_intersep'] = Region( name='inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', - connect_outer='inner_SOL', connect_lower='lower_inner_intersep', - connect_upper='outer_intersep') + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connection_inner_x='inner_core', + connection_outer_x='inner_SOL', connection_lower_y='lower_inner_intersep', + connection_upper_y='outer_intersep') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, - connect_inner='inner_intersep', connect_lower='lower_inner_SOL', - connect_upper='upper_inner_SOL') + connection_inner_x='inner_intersep', connection_lower_y='lower_inner_SOL', + connection_upper_y='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, 
xouter_ind=ixs1, ylower_ind=jys21 + 1, yupper_ind=ny_inner, - connect_outer='upper_inner_intersep', connect_lower='upper_outer_PFR') + connection_outer_x='upper_inner_intersep', + connection_lower_y='upper_outer_PFR') regions['upper_inner_intersep'] = Region( name='upper_inner_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, ylower_ind=jys21 + 1, yupper_ind=ny_inner, - connect_inner='upper_inner_PFR', connect_outer='upper_inner_SOL', - connect_lower='upper_outer_intersep') + connection_inner_x='upper_inner_PFR', connection_outer_x='upper_inner_SOL', + connection_lower_y='upper_outer_intersep') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys21 + 1, yupper_ind=ny_inner, - connect_inner='upper_inner_intersep', connect_lower='inner_SOL') + connection_inner_x='upper_inner_intersep', connection_lower_y='inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ny_inner, yupper_ind=jys12 + 1, - connect_outer='upper_outer_intersep', connect_upper='upper_inner_PFR') + connection_outer_x='upper_outer_intersep', + connection_upper_y='upper_inner_PFR') regions['upper_outer_intersep'] = Region( name='upper_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, ylower_ind=ny_inner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_PFR', connect_outer='upper_outer_SOL', - connect_upper='upper_inner_intersep') + connection_inner_x='upper_outer_PFR', connection_outer_x='upper_outer_SOL', + connection_upper_y='upper_inner_intersep') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=ny_inner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_intersep', connect_upper='outer_SOL') + connection_inner_x='upper_outer_intersep', connection_upper_y='outer_SOL') regions['outer_core'] = Region( name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, - 
connect_outer='outer_intersep', connect_lower='inner_core', - connect_upper='inner_core') + connection_outer_x='outer_intersep', connection_lower_y='inner_core', + connection_upper_y='inner_core') regions['outer_intersep'] = Region( name='outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', - connect_outer='outer_SOL', connect_lower='inner_intersep', - connect_upper='lower_outer_intersep') + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connection_inner_x='outer_core', + connection_outer_x='outer_SOL', connection_lower_y='inner_intersep', + connection_upper_y='lower_outer_intersep') regions['outer_SOL'] = Region( name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, - connect_inner='outer_intersep', connect_lower='upper_outer_SOL', - connect_upper='lower_outer_SOL') + connection_inner_x='outer_intersep', connection_lower_y='upper_outer_SOL', + connection_upper_y='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys22 + 1, yupper_ind=ny, - connect_outer='lower_outer_intersep', connect_lower='lower_inner_PFR') + connection_outer_x='lower_outer_intersep', + connection_lower_y='lower_inner_PFR') regions['lower_outer_intersep'] = Region( name='lower_outer_intersep', ds=ds, xinner_ind=ixs1, xouter_ind=ixs2, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_outer='lower_outer_SOL', connect_lower='outer_intersep') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_inner_x='lower_outer_PFR', + connection_outer_x='lower_outer_SOL', connection_lower_y='outer_intersep') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys22 + 1, yupper_ind=ny, - connect_inner='lower_outer_intersep', connect_lower='outer_SOL') + connection_inner_x='lower_outer_intersep', connection_lower_y='outer_SOL') 
return regions @@ -413,52 +417,52 @@ def topology_connected_double_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', - connect_upper='lower_outer_PFR') + ylower_ind=0, yupper_ind=jys11 + 1, connection_outer_x='lower_inner_SOL', + connection_upper_y='lower_outer_PFR') regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_upper='inner_SOL') + ylower_ind=0, yupper_ind=jys11 + 1, connection_inner_x='lower_inner_PFR', + connection_upper_y='inner_SOL') regions['inner_core'] = Region( name='inner_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_outer='inner_SOL', - connect_lower='outer_core', connect_upper='outer_core') + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connection_outer_x='inner_SOL', + connection_lower_y='outer_core', connection_upper_y='outer_core') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connect_inner='inner_core', - connect_lower='lower_inner_SOL', connect_upper='upper_inner_SOL') + ylower_ind=jys11 + 1, yupper_ind=jys21 + 1, connection_inner_x='inner_core', + connection_lower_y='lower_inner_SOL', connection_upper_y='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys21 + 1, yupper_ind=ny_inner, - connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') + connection_outer_x='upper_inner_SOL', connection_lower_y='upper_outer_PFR') regions['upper_inner_SOL'] = Region( name='upper_inner_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=jys21 + 1, yupper_ind=ny_inner, - connect_inner='upper_inner_PFR', 
connect_lower='inner_SOL') + connection_inner_x='upper_inner_PFR', connection_lower_y='inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ny_inner, yupper_ind=jys12 + 1, - connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') + connection_outer_x='upper_outer_SOL', connection_upper_y='upper_inner_PFR') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, ylower_ind=ny_inner, yupper_ind=jys12 + 1, - connect_inner='upper_outer_PFR', connect_upper='outer_SOL') + connection_inner_x='upper_outer_PFR', connection_upper_y='outer_SOL') regions['outer_core'] = Region( name='outer_core', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_outer='outer_SOL', - connect_lower='inner_core', connect_upper='inner_core') + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connection_outer_x='outer_SOL', + connection_lower_y='inner_core', connection_upper_y='inner_core') regions['outer_SOL'] = Region( name='outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connect_inner='outer_core', - connect_lower='upper_outer_SOL', connect_upper='lower_outer_SOL') + ylower_ind=jys12 + 1, yupper_ind=jys22 + 1, connection_inner_x='outer_core', + connection_lower_y='upper_outer_SOL', connection_upper_y='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', - connect_lower='lower_inner_PFR') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_outer_x='lower_outer_SOL', + connection_lower_y='lower_inner_PFR') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs2, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_lower='outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, 
connection_inner_x='lower_outer_PFR', + connection_lower_y='outer_SOL') return regions @@ -470,27 +474,28 @@ def topology_single_null(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, j regions = {} regions['inner_PFR'] = Region( name='inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=0, - yupper_ind=jys11 + 1, connect_outer='inner_SOL', - connect_upper='outer_PFR') + yupper_ind=jys11 + 1, connection_outer_x='inner_SOL', + connection_upper_y='outer_PFR') regions['inner_SOL'] = Region( name='inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=jys11 + 1, connect_inner='inner_PFR', connect_upper='SOL') + yupper_ind=jys11 + 1, connection_inner_x='inner_PFR', + connection_upper_y='SOL') regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1, connect_outer='SOL', connect_lower='core', - connect_upper='core') + yupper_ind=jys22 + 1, connection_outer_x='SOL', connection_lower_y='core', + connection_upper_y='core') regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=jys11 + 1, - yupper_ind=jys22 + 1, connect_inner='core', connect_lower='inner_SOL', - connect_upper='outer_SOL') + yupper_ind=jys22 + 1, connection_inner_x='core', + connection_lower_y='inner_SOL', connection_upper_y='outer_SOL') regions['outer_PFR'] = Region( name='outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='outer_SOL', - connect_lower='inner_PFR') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_outer_x='outer_SOL', + connection_lower_y='inner_PFR') regions['outer_SOL'] = Region( name='outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='outer_PFR', - connect_lower='SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_inner_x='outer_PFR', + connection_lower_y='SOL') return regions @@ -502,11 +507,11 @@ def topology_limiter(*, ds, ixs1, ixs2, nx, jys11, 
jys21, ny_inner, jys12, jys22 regions = {} regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ybndry, - yupper_ind=ny - ybndry, connect_outer='SOL', connect_lower='core', - connect_upper='core') + yupper_ind=ny - ybndry, connection_outer_x='SOL', connection_lower_y='core', + connection_upper_y='core') regions['SOL'] = Region( name='SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=0, - yupper_ind=ny, connect_inner='core') + yupper_ind=ny, connection_inner_x='core') return regions @@ -518,7 +523,7 @@ def topology_core(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, n regions = {} regions['core'] = Region( name='core', ds=ds, xinner_ind=0, xouter_ind=nx, ylower_ind=ybndry, - yupper_ind=ny - ybndry, connect_lower='core', connect_upper='core') + yupper_ind=ny - ybndry, connection_lower_y='core', connection_upper_y='core') return regions @@ -542,36 +547,36 @@ def topology_xpoint(*, ds, ixs1, ixs2, nx, jys11, jys21, ny_inner, jys12, jys22, regions = {} regions['lower_inner_PFR'] = Region( name='lower_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=0, yupper_ind=jys11 + 1, connect_outer='lower_inner_SOL', - connect_upper='lower_outer_PFR') + ylower_ind=0, yupper_ind=jys11 + 1, connection_outer_x='lower_inner_SOL', + connection_upper_y='lower_outer_PFR') regions['lower_inner_SOL'] = Region( name='lower_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=0, yupper_ind=jys11 + 1, connect_inner='lower_inner_PFR', - connect_upper='upper_inner_SOL') + ylower_ind=0, yupper_ind=jys11 + 1, connection_inner_x='lower_inner_PFR', + connection_upper_y='upper_inner_SOL') regions['upper_inner_PFR'] = Region( name='upper_inner_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=jys11 + 1, yupper_ind=ny_inner, - connect_outer='upper_inner_SOL', connect_lower='upper_outer_PFR') + connection_outer_x='upper_inner_SOL', connection_lower_y='upper_outer_PFR') regions['upper_inner_SOL'] = Region( 
name='upper_inner_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=jys11 + 1, yupper_ind=ny_inner, - connect_inner='upper_inner_PFR', connect_lower='lower_inner_SOL') + connection_inner_x='upper_inner_PFR', connection_lower_y='lower_inner_SOL') regions['upper_outer_PFR'] = Region( name='upper_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, ylower_ind=ny_inner, yupper_ind=jys22 + 1, - connect_outer='upper_outer_SOL', connect_upper='upper_inner_PFR') + connection_outer_x='upper_outer_SOL', connection_upper_y='upper_inner_PFR') regions['upper_outer_SOL'] = Region( name='upper_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, ylower_ind=ny_inner, yupper_ind=jys22 + 1, - connect_inner='upper_outer_PFR', connect_upper='lower_outer_SOL') + connection_inner_x='upper_outer_PFR', connection_upper_y='lower_outer_SOL') regions['lower_outer_PFR'] = Region( name='lower_outer_PFR', ds=ds, xinner_ind=0, xouter_ind=ixs1, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_outer='lower_outer_SOL', - connect_lower='lower_inner_PFR') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_outer_x='lower_outer_SOL', + connection_lower_y='lower_inner_PFR') regions['lower_outer_SOL'] = Region( name='lower_outer_SOL', ds=ds, xinner_ind=ixs1, xouter_ind=nx, - ylower_ind=jys22 + 1, yupper_ind=ny, connect_inner='lower_outer_PFR', - connect_lower='upper_outer_SOL') + ylower_ind=jys22 + 1, yupper_ind=ny, connection_inner_x='lower_outer_PFR', + connection_lower_y='upper_outer_SOL') return regions From fafe7ae5650b142aa58d791efc15b19fae7a7dec Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 26 Mar 2020 01:26:44 +0000 Subject: [PATCH 64/94] Fix merge of regions Need to select 1d slice of dx in Region.__init__() --- xbout/region.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/region.py b/xbout/region.py index fafb6068..7c80e869 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -101,7 +101,7 @@ def __init__(self, *, name, ds=None, xinner_ind=None, 
xouter_ind=None, # particular regions, so do not need to be consistent between different # regions (e.g. core and PFR), so we are not forced to use just the index # value here. - dx = ds['dx'] + dx = ds['dx'].isel({self.ycoord: ylower_ind}) dx_cumsum = dx.cumsum() self.xinner = dx_cumsum[xinner_ind] - dx[xinner_ind] self.xouter = dx_cumsum[xouter_ind - 1] + dx[xouter_ind - 1] From 1d3e5c7290e4a08aa6f0074490844aed958c2556 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 4 Apr 2020 17:18:23 +0100 Subject: [PATCH 65/94] Refactor fine_interpolation_factor as a @property --- xbout/boutdataarray.py | 24 ++++++++++++++++++++---- xbout/boutdataset.py | 13 +++++++++---- xbout/load.py | 4 ++++ 3 files changed, 33 insertions(+), 8 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index c827b23b..711d7b1b 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -187,6 +187,25 @@ def from_region(self, name, with_guards=None): return da + @property + def fine_interpolation_factor(self): + """ + The default factor to increase resolution when doing parallel interpolation + """ + return self.data.metadata['fine_interpolation_factor'] + + @fine_interpolation_factor.setter + def fine_interpolation_factor(self, n): + """ + Set the default factor to increase resolution when doing parallel interpolation. 
+ + Parameters + ----------- + n : int + Factor to increase parallel resolution by + """ + self.data.metadata['fine_interpolation_factor'] = n + def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, method='cubic', return_dataset=False): """ @@ -266,10 +285,7 @@ def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, aligned_input = True if n is None: - try: - n = self.data.metadata['fine_interpolation_factor'] - except KeyError: - n = 8 + n = self.fine_interpolation_factor da = da.bout.from_region(region.name, with_guards={xcoord: 0, ycoord: 2}) da = da.chunk({ycoord: None}) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 7a58a6bc..f61bcdf8 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -79,7 +79,15 @@ def getFieldAligned(self, name, caching=True): self.data[aligned_name] = self.data[name].bout.toFieldAligned() return self.data[aligned_name] - def set_parallel_interpolation_factor(self, n): + @property + def fine_interpolation_factor(self): + """ + The default factor to increase resolution when doing parallel interpolation + """ + return self.data.metadata['fine_interpolation_factor'] + + @fine_interpolation_factor.setter + def fine_interpolation_factor(self, n): """ Set the default factor to increase resolution when doing parallel interpolation. 
@@ -88,14 +96,11 @@ def set_parallel_interpolation_factor(self, n): n : int Factor to increase parallel resolution by """ - ds = self.data ds.metadata['fine_interpolation_factor'] = n for da in ds.values(): da.metadata['fine_interpolation_factor'] = n - return ds - def interpolate_parallel(self, variables, **kwargs): """ Interpolate in the parallel direction to get a higher resolution version of a diff --git a/xbout/load.py b/xbout/load.py index 0a1eaf9d..61af6d8e 100644 --- a/xbout/load.py +++ b/xbout/load.py @@ -168,6 +168,10 @@ def open_boutdataset(datapath='./BOUT.dmp.*.nc', inputfilepath=None, if run_name: ds.name = run_name + # Set some default settings that are only used in post-processing by xBOUT, not by + # BOUT++ + ds.bout.fine_interpolation_factor = 8 + if info is 'terse': print("Read in dataset from {}".format(str(Path(datapath)))) elif info: From c81a1433e9c6b79a5ab195818b3d1dd4018eda8e Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 4 Apr 2020 17:23:54 +0100 Subject: [PATCH 66/94] Small tidy-up Whitespace, ds.data_vars instead of ds.values(), missing variable name in exception. 
--- xbout/boutdataarray.py | 1 + xbout/boutdataset.py | 2 +- xbout/geometries.py | 6 +++--- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 711d7b1b..bee0836e 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -59,6 +59,7 @@ def to_dataset(self): def dropIfExists(ds, name): if name in ds.attrs: del ds.attrs[name] + dropIfExists(ds, 'direction_y') dropIfExists(ds, 'direction_z') diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index f61bcdf8..46dfa6f1 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -98,7 +98,7 @@ def fine_interpolation_factor(self, n): """ ds = self.data ds.metadata['fine_interpolation_factor'] = n - for da in ds.values(): + for da in ds.data_vars: da.metadata['fine_interpolation_factor'] = n def interpolate_parallel(self, variables, **kwargs): diff --git a/xbout/geometries.py b/xbout/geometries.py index 00f40d89..c33d26e6 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -201,9 +201,9 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): for v in needed_variables: if v not in ds: if grid is None: - raise ValueError("Grid file is required to provide %s. Pass the " - "grid file name as the 'gridfilepath' argument to " - "open_boutdataset().") + raise ValueError(f"Grid file is required to provide {v}. 
Pass the " + f"grid file name as the 'gridfilepath' argument to " + f"open_boutdataset().") ds[v] = grid[v] # Rename 't' if user requested it From e8ac8c6c60e5e14e434219879997df6996bd11bf Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 4 Apr 2020 22:33:12 +0100 Subject: [PATCH 67/94] Still need to use .values() to get DataArrays instead of names --- xbout/boutdataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 46dfa6f1..30a7622a 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -98,7 +98,7 @@ def fine_interpolation_factor(self, n): """ ds = self.data ds.metadata['fine_interpolation_factor'] = n - for da in ds.data_vars: + for da in ds.data_vars.values(): da.metadata['fine_interpolation_factor'] = n def interpolate_parallel(self, variables, **kwargs): From 37d1220c99312070d2efdfa2b650d5888b976c1c Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 9 Apr 2020 14:27:24 +0100 Subject: [PATCH 68/94] Fix use of fine_interpolation_factor in tests When registering "Schwarzschild" geometry, need to add attrs to 'event_horizon' variable. 
--- xbout/tests/test_boutdataset.py | 2 +- xbout/tests/test_grid.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 48d60d70..cc58ae7f 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -95,7 +95,7 @@ def test_set_parallel_interpolation_factor(self): with pytest.raises(KeyError): ds['a'].metadata['fine_interpolation_factor'] - ds.bout.set_parallel_interpolation_factor(42) + ds.bout.fine_interpolation_factor = 42 assert ds.metadata['fine_interpolation_factor'] == 42 assert ds['a'].metadata['fine_interpolation_factor'] == 42 diff --git a/xbout/tests/test_grid.py b/xbout/tests/test_grid.py index 175d5951..d30361b4 100644 --- a/xbout/tests/test_grid.py +++ b/xbout/tests/test_grid.py @@ -60,6 +60,7 @@ def test_open_grid_apply_geometry(self, create_example_grid_file): @register_geometry(name="Schwarzschild") def add_schwarzschild_coords(ds, coordinates=None): ds['event_horizon'] = 4.0 + ds['event_horizon'].attrs = ds.attrs.copy() return ds example_grid = create_example_grid_file From acad60df1658b4c0770a09834c50e176ee20c963 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 21:04:59 +0100 Subject: [PATCH 69/94] Always check if dy is in variables in BoutDataset.interpolate_parallel --- xbout/boutdataset.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 30a7622a..390dc953 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -135,11 +135,11 @@ def interpolate_parallel(self, variables, **kwargs): if variables is ...: variables = [v for v in self.data] - if 'dy' in variables: - # dy is treated specially, as it is converted to a coordinate, and then - # converted back again below, so must not call - # interpolate_parallel('dy'). 
- variables.remove('dy') + if 'dy' in variables: + # dy is treated specially, as it is converted to a coordinate, and then + # converted back again below, so must not call + # interpolate_parallel('dy'). + variables.remove('dy') if isinstance(variables, str): ds = self.data[variables].bout.interpolate_parallel(return_dataset=True, From a316fb2290868b6d40f2e8026c7d84b42ff7cfb6 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 21:10:35 +0100 Subject: [PATCH 70/94] Simplify BoutDataset.interpolate_parallel() --- xbout/boutdataset.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 390dc953..68821eba 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -135,28 +135,29 @@ def interpolate_parallel(self, variables, **kwargs): if variables is ...: variables = [v for v in self.data] + + if isinstance(variables, str): + variables = [variables] + if 'dy' in variables: # dy is treated specially, as it is converted to a coordinate, and then # converted back again below, so must not call # interpolate_parallel('dy'). variables.remove('dy') - if isinstance(variables, str): - ds = self.data[variables].bout.interpolate_parallel(return_dataset=True, - **kwargs) - else: - # Need to start with a Dataset with attrs as merge() drops the attrs of the - # passed-in argument. - ds = self.data[variables[0]].bout.interpolate_parallel(return_dataset=True, - **kwargs) - for var in variables[1:]: - ds = ds.merge( - self.data[var].bout.interpolate_parallel(return_dataset=True, - **kwargs) - ) - # Add extra variables needed to make this a valid Dataset - ds['dx'] = self.data['dx'].bout.interpolate_parallel(**kwargs) + if 'dx' not in variables: + variables.append('dx') + + # Need to start with a Dataset with attrs as merge() drops the attrs of the + # passed-in argument. 
+ ds = self.data[variables[0]].bout.interpolate_parallel(return_dataset=True, + **kwargs) + for var in variables[1:]: + ds = ds.merge( + self.data[var].bout.interpolate_parallel(return_dataset=True, + **kwargs) + ) # dy needs to be compatible with the new poloidal coordinate # dy was created as a coordinate in BoutDataArray.interpolate_parallel, here just From c74b94169a0e9b8e1263711c337c3b40b6c242ec Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 22:11:43 +0100 Subject: [PATCH 71/94] Convert tuple to list if passed to BoutDataset.interpolate_parallel() tuples are immutable, so need to make 'variables' a list instead. --- xbout/boutdataset.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 68821eba..eebb9e05 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -138,6 +138,8 @@ def interpolate_parallel(self, variables, **kwargs): if isinstance(variables, str): variables = [variables] + if isinstance(variables, tuple): + variables = list(variables) if 'dy' in variables: # dy is treated specially, as it is converted to a coordinate, and then From 2e49738226f281a0c8aa4a5ce6d9749a79486311 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 22:15:41 +0100 Subject: [PATCH 72/94] Check dimensions of variables to interpolate Especially when passing '...' to interpolate all variables, need to check wether variable in the variable list has both x- and y-dimenions, otherwise it can't be interpolated - and if it has no y-dimension, doesn't need to be interpolated. Also ensure the first variable added to the high-resolution Dataset has all dimensions of the original Dataset (or at least as many as possible) so that all coordinates get passed on to the high-resolution Dataset. 
--- xbout/boutdataset.py | 43 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 36 insertions(+), 7 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index eebb9e05..45c79808 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -153,13 +153,42 @@ def interpolate_parallel(self, variables, **kwargs): # Need to start with a Dataset with attrs as merge() drops the attrs of the # passed-in argument. - ds = self.data[variables[0]].bout.interpolate_parallel(return_dataset=True, - **kwargs) - for var in variables[1:]: - ds = ds.merge( - self.data[var].bout.interpolate_parallel(return_dataset=True, - **kwargs) - ) + # Make sure the first variable has all dimensions so we don't lose any + # coordinates + def find_with_dims(first_var, dims): + if first_var is None: + dims = set(dims) + for v in variables: + if set(self.data[v].dims) == dims: + first_var = v + break + return first_var + tcoord = self.data.metadata.get("bout_tdim", "t") + zcoord = self.data.metadata.get("bout_zdim", "z") + first_var = find_with_dims(None, self.data.dims) + first_var = find_with_dims(first_var, set(self.data.dims) - set(tcoord)) + first_var = find_with_dims(first_var, set(self.data.dims) - set(zcoord)) + first_var = find_with_dims(first_var, set(self.data.dims) - set([tcoord, zcoord])) + if first_var is None: + raise ValueError( + f"Could not find variable to interpolate with both " + f"{ds.metadata.get('bout_xdim', 'x')} and " + f"{ds.metadata.get('bout_ydim', 'y')} dimensions" + ) + variables.remove(first_var) + ds = self.data[first_var].bout.interpolate_parallel(return_dataset=True, + **kwargs) + xcoord = ds.metadata.get("bout_xdim", "x") + ycoord = ds.metadata.get("bout_ydim", "y") + for var in variables: + da = self.data[var] + if xcoord in da.dims and ycoord in da.dims: + ds = ds.merge( + da.bout.interpolate_parallel(return_dataset=True, **kwargs) + ) + elif ycoord not in da.dims: + ds = ds.merge(da) + # Can't interpolate a variable that depends on 
y but not x, so just skip # dy needs to be compatible with the new poloidal coordinate # dy was created as a coordinate in BoutDataArray.interpolate_parallel, here just From 5f300ea54b1954f6e038644dacad20d8a4765e12 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 22:21:02 +0100 Subject: [PATCH 73/94] Add test of ... arg: TestBoutDatasetMethods.test_interpolate_parallel --- xbout/tests/test_boutdataset.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index cc58ae7f..fc38daeb 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -102,8 +102,12 @@ def test_set_parallel_interpolation_factor(self): @pytest.mark.parametrize(params_guards, params_guards_values) @pytest.mark.parametrize(params_boundaries, params_boundaries_values) + @pytest.mark.parametrize( + "vars_to_interpolate", [('n', 'T'), pytest.param(..., marks=pytest.mark.long)] + ) def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files, - guards, keep_xboundaries, keep_yboundaries): + guards, keep_xboundaries, keep_yboundaries, + vars_to_interpolate): # This test checks that the regions created in the new high-resolution Dataset by # interpolate_parallel are correct. 
# This test does not test the accuracy of the parallel interpolation (there are @@ -122,7 +126,7 @@ def test_interpolate_parallel(self, tmpdir_factory, bout_xyt_example_files, keep_yboundaries=keep_yboundaries) # Get high parallel resolution version of ds, and check that - ds = ds.bout.interpolate_parallel(('n', 'T')) + ds = ds.bout.interpolate_parallel(vars_to_interpolate) mxg = guards['x'] myg = guards['y'] From 33735d3a86ae57c29397a2ada992c3f7bfc3da80 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 20 Apr 2020 22:47:37 +0100 Subject: [PATCH 74/94] PEP8 fixes --- xbout/boutdataset.py | 3 ++- xbout/tests/test_boutdataset.py | 7 ++++--- xbout/tests/test_region.py | 6 +++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 45c79808..c6dc8ca6 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -168,7 +168,8 @@ def find_with_dims(first_var, dims): first_var = find_with_dims(None, self.data.dims) first_var = find_with_dims(first_var, set(self.data.dims) - set(tcoord)) first_var = find_with_dims(first_var, set(self.data.dims) - set(zcoord)) - first_var = find_with_dims(first_var, set(self.data.dims) - set([tcoord, zcoord])) + first_var = find_with_dims(first_var, set(self.data.dims) + - set([tcoord, zcoord])) if first_var is None: raise ValueError( f"Could not find variable to interpolate with both " diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index fc38daeb..8a4f14fd 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -452,9 +452,10 @@ def test_interpolate_parallel_all_variables_arg(self, tmpdir_factory, interpolated_variables = [v for v in ds] - assert set(interpolated_variables) == set(('n', 'T', 'g11', 'g22', 'g33', 'g12', - 'g13', 'g23', 'g_11', 'g_22', 'g_33', 'g_12', 'g_13', 'g_23', 'G1', 'G2', - 'G3', 'J', 'Bxy', 'dx', 'dy')) + assert set(interpolated_variables) == set(( + 'n', 'T', 'g11', 'g22', 'g33', 'g12', 
'g13', 'g23', 'g_11', 'g_22', 'g_33', + 'g_12', 'g_13', 'g_23', 'G1', 'G2', 'G3', 'J', 'Bxy', 'dx', 'dy' + )) class TestLoadInputFile: diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index 60fb2e41..de162127 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -1124,7 +1124,7 @@ def test_region_disconnecteddoublenull_get_one_guard( n_upper_inner_PFR.isel(theta=slice(yguards)).values) n_upper_inner_intersep = n.bout.from_region('upper_inner_intersep', - with_guards=with_guards) + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_inner_intersep.attrs['region'] @@ -1170,7 +1170,7 @@ def test_region_disconnecteddoublenull_get_one_guard( n_upper_outer_PFR.isel(theta=slice(-yguards, None)).values) n_upper_outer_intersep = n.bout.from_region('upper_outer_intersep', - with_guards=with_guards) + with_guards=with_guards) # Remove attributes that are expected to be different del n_upper_outer_intersep.attrs['region'] @@ -1273,7 +1273,7 @@ def test_region_disconnecteddoublenull_get_one_guard( n_lower_outer_PFR.isel(theta=slice(yguards)).values) n_lower_outer_intersep = n.bout.from_region('lower_outer_intersep', - with_guards=with_guards) + with_guards=with_guards) # Remove attributes that are expected to be different del n_lower_outer_intersep.attrs['region'] From c95f1941335ba90fa918fd9660abc7bb1b2a7256 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 21 Apr 2020 09:38:34 +0100 Subject: [PATCH 75/94] Assign DataArray to Dataset member instead of using merge() ds = ds.merge(da) works in latest xarray, but not slightly older versions. 
--- xbout/boutdataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index c6dc8ca6..edad2dea 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -188,7 +188,7 @@ def find_with_dims(first_var, dims): da.bout.interpolate_parallel(return_dataset=True, **kwargs) ) elif ycoord not in da.dims: - ds = ds.merge(da) + ds[var] = da # Can't interpolate a variable that depends on y but not x, so just skip # dy needs to be compatible with the new poloidal coordinate From 2acb61da56474d0b18ce78e261c25b82b5bc0829 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 21 Apr 2020 10:00:45 +0100 Subject: [PATCH 76/94] Avoid errors if regions were not created 'regions' should not be required generically for a BoutDataset or BoutDataArray, only for methods like poloidal plots which make use of them. --- xbout/boutdataset.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index edad2dea..cd2c6d32 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -276,7 +276,10 @@ def dict_to_attrs(obj, key): pass # Do not need to save regions as these can be reconstructed from the metadata - del to_save.attrs['regions'] + try: + del to_save.attrs['regions'] + except KeyError: + pass for var in chain(to_save.data_vars, to_save.coords): try: del to_save[var].attrs['regions'] From 1e4d47ac03bdb88af4e0e2eced8615db3dc9e528 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Tue, 21 Apr 2020 10:14:51 +0100 Subject: [PATCH 77/94] Better support for generic x and y in poloidal plots By default, the x and y coordinates for poloidal plots are "R" and "Z", but they are supposed to be allowed to be set to any 2d coordinates on the poloidal plane. This commit adds support for generic x and y to plot_separatrices, plot_targets and to the X-point region check in plot2d_wrapper. 
--- xbout/boutdataarray.py | 10 ++++++++++ xbout/boutdataset.py | 10 ++++++++++ xbout/plotting/animate.py | 4 ++-- xbout/plotting/plotfuncs.py | 32 ++++++++++++++++++++------------ xbout/plotting/utils.py | 26 +++++++++++++------------- 5 files changed, 55 insertions(+), 27 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index bee0836e..2d77a739 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -141,6 +141,16 @@ def fromFieldAligned(self): result["direction_y"] = "Standard" return result + @property + def regions(self): + if "regions" not in self.data.attrs: + raise ValueError( + "Called a method requiring regions, but these have not been created. " + "Please set the 'geometry' option when calling open_boutdataset() to " + "create regions." + ) + return self.data.attrs["regions"] + def from_region(self, name, with_guards=None): """ Get a logically-rectangular section of data from a certain region. diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index cd2c6d32..c0dd2d41 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -79,6 +79,16 @@ def getFieldAligned(self, name, caching=True): self.data[aligned_name] = self.data[name].bout.toFieldAligned() return self.data[aligned_name] + @property + def regions(self): + if "regions" not in self.data.attrs: + raise ValueError( + "Called a method requiring regions, but these have not been created. " + "Please set the 'geometry' option when calling open_boutdataset() to " + "create regions." 
+ ) + return self.data.attrs["regions"] + @property def fine_interpolation_factor(self): """ diff --git a/xbout/plotting/animate.py b/xbout/plotting/animate.py index c1d7a7ac..e2149eaa 100644 --- a/xbout/plotting/animate.py +++ b/xbout/plotting/animate.py @@ -121,10 +121,10 @@ def animate_poloidal(da, *, ax=None, cax=None, animate_over='t', separatrix=True targets = False if separatrix: - plot_separatrices(da_regions, ax) + plot_separatrices(da_regions, ax, x=x, y=y) if targets: - plot_targets(da_regions, ax, hatching=add_limiter_hatching) + plot_targets(da_regions, ax, x=x, y=y, hatching=add_limiter_hatching) if animate: timeline = amp.Timeline(np.arange(da.sizes[animate_over]), fps=fps) diff --git a/xbout/plotting/plotfuncs.py b/xbout/plotting/plotfuncs.py index 7b293bb0..71687c52 100644 --- a/xbout/plotting/plotfuncs.py +++ b/xbout/plotting/plotfuncs.py @@ -213,12 +213,20 @@ def plot2d_wrapper(da, method, *, ax=None, separatrix=True, targets=True, raise ValueError('Argument passed to gridlines must be bool, int or ' 'slice. 
Got a ' + type(value) + ', ' + str(value)) - R_regions = [da_region['R'] for da_region in da_regions.values()] - Z_regions = [da_region['Z'] for da_region in da_regions.values()] - - for R, Z in zip(R_regions, Z_regions): - if (not da.metadata['bout_xdim'] in R.dims - and not da.metadata['bout_ydim'] in R.dims): + x_regions = [da_region[x] for da_region in da_regions.values()] + y_regions = [da_region[y] for da_region in da_regions.values()] + + for x, y in zip(x_regions, y_regions): + if ( + ( + not da.metadata['bout_xdim'] in x.dims + and not da.metadata['bout_ydim'] in x.dims + ) + or ( + not da.metadata['bout_xdim'] in y.dims + and not da.metadata['bout_ydim'] in y.dims + ) + ): # Small regions around X-point do not have segments in x- or y-directions, # so skip # Currently this region does not exist, but there is a small white gap at @@ -229,16 +237,16 @@ def plot2d_wrapper(da, method, *, ax=None, separatrix=True, targets=True, # form dim_order = (da.metadata['bout_xdim'], da.metadata['bout_ydim']) yarg = {da.metadata['bout_ydim']: gridlines['x']} - plt.plot(R.isel(**yarg).transpose(*dim_order, transpose_coords=True), - Z.isel(**yarg).transpose(*dim_order, transpose_coords=True), + plt.plot(x.isel(**yarg).transpose(*dim_order, transpose_coords=True), + y.isel(**yarg).transpose(*dim_order, transpose_coords=True), color='k', lw=0.1) if gridlines.get('y') is not None: xarg = {da.metadata['bout_xdim']: gridlines['y']} # Need to plot transposed arrays to make gridlines that go in the # y-direction dim_order = (da.metadata['bout_ydim'], da.metadata['bout_xdim']) - plt.plot(R.isel(**xarg).transpose(*dim_order, transpose_coords=True), - Z.isel(**yarg).transpose(*dim_order, transpose_coords=True), + plt.plot(x.isel(**xarg).transpose(*dim_order, transpose_coords=True), + y.isel(**yarg).transpose(*dim_order, transpose_coords=True), color='k', lw=0.1) ax.set_title(da.name) @@ -248,9 +256,9 @@ def plot2d_wrapper(da, method, *, ax=None, separatrix=True, targets=True, 
targets = False if separatrix: - plot_separatrices(da_regions, ax) + plot_separatrices(da_regions, ax, x=x, y=y) if targets: - plot_targets(da_regions, ax, hatching=add_limiter_hatching) + plot_targets(da_regions, ax, x=x, y=y, hatching=add_limiter_hatching) return artists diff --git a/xbout/plotting/utils.py b/xbout/plotting/utils.py index 66c2e956..af017986 100644 --- a/xbout/plotting/utils.py +++ b/xbout/plotting/utils.py @@ -73,7 +73,7 @@ def _is_core_only(da): return (ix1 >= nx and ix2 >= nx) -def plot_separatrices(da, ax): +def plot_separatrices(da, ax, *, x='R', y='Z'): """Plot separatrices""" if not isinstance(da, dict): @@ -90,14 +90,14 @@ def plot_separatrices(da, ax): inner = da_region.region.connection_inner_x if inner is not None: da_inner = da_regions[inner] - R = 0.5*(da_inner['R'].isel(**{xcoord: -1}) - + da_region['R'].isel(**{xcoord: 0})) - Z = 0.5*(da_inner['Z'].isel(**{xcoord: -1}) - + da_region['Z'].isel(**{xcoord: 0})) - ax.plot(R, Z, 'k--') + x_sep = 0.5*(da_inner[x].isel(**{xcoord: -1}) + + da_region[x].isel(**{xcoord: 0})) + y_sep = 0.5*(da_inner[y].isel(**{xcoord: -1}) + + da_region[y].isel(**{xcoord: 0})) + ax.plot(x_sep, y_sep, 'k--') -def plot_targets(da, ax, hatching=True): +def plot_targets(da, ax, *, x='R', y='Z', hatching=True): """Plot divertor and limiter target plates""" if not isinstance(da, dict): @@ -118,16 +118,16 @@ def plot_targets(da, ax, hatching=True): for da_region in da_regions.values(): if da_region.region.connection_lower_y is None: # lower target exists - R = da_region.coords['R'].isel(**{ycoord: y_boundary_guards}) - Z = da_region.coords['Z'].isel(**{ycoord: y_boundary_guards}) - [line] = ax.plot(R, Z, 'k-', linewidth=2) + x_target = da_region.coords[x].isel(**{ycoord: y_boundary_guards}) + y_target = da_region.coords[y].isel(**{ycoord: y_boundary_guards}) + [line] = ax.plot(x_target, y_target, 'k-', linewidth=2) if hatching: _add_hatching(line, ax) if da_region.region.connection_upper_y is None: # upper target 
exists - R = da_region.coords['R'].isel(**{ycoord: -y_boundary_guards - 1}) - Z = da_region.coords['Z'].isel(**{ycoord: -y_boundary_guards - 1}) - [line] = ax.plot(R, Z, 'k-', linewidth=2) + x_target = da_region.coords[x].isel(**{ycoord: -y_boundary_guards - 1}) + y_target = da_region.coords[y].isel(**{ycoord: -y_boundary_guards - 1}) + [line] = ax.plot(x_target, y_target, 'k-', linewidth=2) if hatching: _add_hatching(line, ax, reversed=True) From b6425450891c9fb9debd7c9f5e71ab5b18ce788a Mon Sep 17 00:00:00 2001 From: John Omotani Date: Wed, 29 Jul 2020 16:32:39 +0100 Subject: [PATCH 78/94] Require xarray-0.16.0, fix merging attrs in interpolate_parallel() Issue with merging attrs has been fixed in xarray-0.16.0, so can remove workaround, as well as fixing problem with inconsistent regions with new default compat="no_conflicts" for xarray's combine_by_coords(). --- requirements.txt | 2 +- setup.py | 2 +- xbout/boutdataarray.py | 23 ++++++++--------------- 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/requirements.txt b/requirements.txt index d293a93d..856733e7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -xarray >= 0.13.0 +xarray >= 0.16.0 dask[array] >= 1.0.0 natsort >= 5.5.0 matplotlib >= 3.1.1 diff --git a/setup.py b/setup.py index ec1f468b..45affb1f 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ license="Apache", python_requires='>=3.6', install_requires=[ - 'xarray>=v0.13.0', + 'xarray>=0.16.0', 'dask[array]>=1.0.0', 'natsort>=5.5.0', 'matplotlib>=3.1.1', diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 2d77a739..c2559f73 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -258,22 +258,15 @@ def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, method=method).bout.to_dataset() for region in self.data.regions] + # 'region' is not the same for all parts, and should not exist in the result, + # so delete before merging + for part in parts: + if 'region' in 
part.attrs: + del part.attrs['region'] + if 'region' in part[self.data.name].attrs: + del part[self.data.name].attrs['region'] + result = xr.combine_by_coords(parts) - result.attrs = parts[0].attrs - # xr.combine_by_coords does not keep attrs at the moment. See - # https://github.com/pydata/xarray/issues/3865 For now just copy the attrs - # from the first region. Can remove this workaround when the xarray issue is - # fixed. Should be able to use instead of the above just: - # result = xr.combine_by_coords( - # [self.interpolate_parallel(region, n=n, toroidal_points=toroidal_points, - # method=method).bout.to_dataset()] - # ) - - # result has all regions, so should not have a region attribute - if 'region' in result.attrs: - del result.attrs['region'] - if 'region' in result[self.data.name].attrs: - del result[self.data.name].attrs['region'] if return_dataset: return result From f313bc14c6cba6db49fe3e372abe4dba3b1cfd75 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Wed, 29 Jul 2020 19:25:22 +0100 Subject: [PATCH 79/94] Check for xcoord in updated_ds not ds The result to be returned is updated_ds, checking ds meant always adding a new xcoord to updated_ds, even if it was already added by add_geometry_coords(). --- xbout/geometries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index cda7c974..5d98fb17 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -86,7 +86,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): updated_ds = updated_ds.rename({'t_array': tcoord}) updated_ds = updated_ds.set_coords(tcoord) - if xcoord not in ds.coords: + if xcoord not in updated_ds.coords: # Make index 'x' a coordinate, useful for handling global indexing # Note we have to use the index value, not the value calculated from 'dx' because # 'dx' may not be consistent between different regions (e.g. core and PFR). 
From d062fa9e75c02fbfdd46e5d1104b9b12f034448f Mon Sep 17 00:00:00 2001 From: John Omotani Date: Wed, 29 Jul 2020 19:27:36 +0100 Subject: [PATCH 80/94] More consistent attrs on coordinates Ensure 'metadata', 'options', 'regions' and 'geometry' attributes are always added to all coordinates. Ensures consistency between original and saved-and-reloaded Datasets, allowing some workarounds in tests to be removed. --- xbout/geometries.py | 8 +++++++- xbout/tests/test_boutdataset.py | 19 ------------------- xbout/utils.py | 20 ++++++++++++++++---- 3 files changed, 23 insertions(+), 24 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index 5d98fb17..146742eb 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -5,7 +5,7 @@ import numpy as np from .region import Region, _create_regions_toroidal -from .utils import _set_attrs_on_all_vars +from .utils import _add_attrs_to_var, _set_attrs_on_all_vars REGISTERED_GEOMETRIES = {} @@ -94,6 +94,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): # add_geometry_coords, in which case we do not need this. nx = updated_ds.dims[xcoord] updated_ds = updated_ds.assign_coords(**{xcoord: np.arange(nx)}) + _add_attrs_to_var(updated_ds, xcoord) ny = updated_ds.dims[ycoord] # dy should always be constant in x, so it is safe to slice to x=0. # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid @@ -109,6 +110,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): # calculate ycoord at the centre of each cell y = dy.cumsum(keep_attrs=True) - dy/2. 
updated_ds = updated_ds.assign_coords(**{ycoord: y.values}) + _add_attrs_to_var(updated_ds, ycoord) # If full data (not just grid file) then toroidal dim will be present if zcoord in updated_ds.dims: @@ -123,6 +125,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): z = xr.DataArray(np.linspace(start=z0, stop=z1, num=nz, endpoint=False), dims=zcoord) updated_ds = updated_ds.assign_coords(**{zcoord: z}) + _add_attrs_to_var(updated_ds, zcoord) return updated_ds @@ -199,6 +202,7 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): "file name as the 'gridfilepath' argument to " "open_boutdataset().") ds[v] = grid[v] + _add_attrs_to_var(ds, v) # Rename 't' if user requested it ds = ds.rename(t=coordinates['t']) @@ -210,6 +214,7 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # Make index 'x' a coordinate, useful for handling global indexing nx = ds.dims['x'] ds = ds.assign_coords(x=np.arange(nx)) + _add_attrs_to_var(ds, 'x') ny = ds.dims[coordinates['y']] # dy should always be constant in x, so it is safe to slice to x=0. 
# [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid @@ -295,6 +300,7 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): "file name as the 'gridfilepath' argument to " "open_boutdataset().") ds['hthe'] = grid['hthe'] + _add_attrs_to_var(ds, 'hthe') else: hthe_from_grid = False ycoord = coordinates["y"] diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 90322250..251bb117 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -525,16 +525,6 @@ def test_reload_all(self, tmpdir_factory, bout_xyt_example_files, geometry): # Load it again recovered = reload_boutdataset(savepath) - # Compare - for coord in original.coords.values(): - # Get rid of the options if they exist, because options are not dealt with - # totally consistently: they exist if a coord was created from a variable - # loaded from the BOUT++ output, but not if the coord was calculated from - # some parameters or loaded from a grid file - try: - del coord.attrs["options"] - except KeyError: - pass xrt.assert_identical(original.load(), recovered.load()) @pytest.mark.skip("saving and loading as float32 does not work") @@ -607,15 +597,6 @@ def test_reload_separate_variables( recovered = reload_boutdataset(savepath, pre_squashed=True) # Compare - for coord in original.coords.values(): - # Get rid of the options if they exist, because options are not dealt with - # totally consistently: they exist if a coord was created from a variable - # loaded from the BOUT++ output, but not if the coord was calculated from - # some parameters or loaded from a grid file - try: - del coord.attrs["options"] - except KeyError: - pass xrt.assert_identical(recovered, original) diff --git a/xbout/utils.py b/xbout/utils.py index 771a3533..59960b4e 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -1,4 +1,5 @@ from copy import deepcopy +from itertools import chain import numpy as np @@ -6,14 +7,25 @@ def 
_set_attrs_on_all_vars(ds, key, attr_data, copy=False): ds.attrs[key] = attr_data if copy: - for da in ds.values(): - da.attrs[key] = deepcopy(attr_data) + for v in chain(ds.data_vars, ds.coords): + ds[v].attrs[key] = deepcopy(attr_data) else: - for da in ds.values(): - da.attrs[key] = attr_data + for v in chain(ds.data_vars, ds.coords): + ds[v].attrs[key] = attr_data return ds +def _add_attrs_to_var(ds, varname, copy=False): + if copy: + for attr in ["metadata", "options", "geometry", "regions"]: + if attr in ds.attrs and attr not in ds[varname].attrs: + ds[varname].attrs[attr] = deepcopy(ds.attrs[attr]) + else: + for attr in ["metadata", "options", "geometry", "regions"]: + if attr in ds.attrs and attr not in ds[varname].attrs: + ds[varname].attrs[attr] = ds.attrs[attr] + + def _check_filetype(path): if path.suffix == '.nc': filetype = 'netcdf4' From 56a597ca2894c0b79d0187c4ef35881dc7c76f8a Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 13:19:18 +0100 Subject: [PATCH 81/94] Fix performance regression in BoutDataArray.interpolate_parallel() Adding attrs to the 'ycoord' coordinate in d062fa9e75c02fbfdd46e5d1104b9b12f034448f made interpolate_parallel() very slow. Don't understand why, but adding 'da = da.compute()' before the interpolation restores the speed. --- xbout/boutdataarray.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index c2559f73..da67b573 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -311,6 +311,11 @@ def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, region.yupper + (ybndry_upper - 0.5)*dy, ny_fine + ybndry_lower + ybndry_upper) + # This prevents da.interp() from being very slow, but don't know why. 
+ # Slow-down was introduced in d062fa9e75c02fbfdd46e5d1104b9b12f034448f when + # _add_attrs_to_var(updated_ds, ycoord) was added in geometries.py + da = da.compute() + da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, kwargs={'fill_value': 'extrapolate'}) From ed27defa9eb4b84e2cb903216a2dfba9039022dc Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 14:03:27 +0100 Subject: [PATCH 82/94] Update Travis config for minimum versions to xarray-0.16.0 xarray-0.16.0 is required now, older versions will fail the tests. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 838ad04d..23b70fc8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ python: - "3.7" env: - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray!=0.14.0 numpy>=1.16.0" - - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.13.0 dask==1.0.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. + - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.16.0 dask==1.0.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. install: - pip install --upgrade ${PIP_PACKAGES} - pip install -r requirements.txt From dfab775981c696f100c15ffcdad878aae4939057 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 14:52:46 +0100 Subject: [PATCH 83/94] Update minimum dask to minimum version supported by xarray-0.16.0 xarray requires less-than-6-months old dask, so 0.16.0 requires dask-2.10. 
--- .travis.yml | 2 +- requirements.txt | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 23b70fc8..1a042ff6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ python: - "3.7" env: - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray!=0.14.0 numpy>=1.16.0" - - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.16.0 dask==1.0.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. + - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.16.0 dask==2.10.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. 
install: - pip install --upgrade ${PIP_PACKAGES} - pip install -r requirements.txt diff --git a/requirements.txt b/requirements.txt index 856733e7..ef17102a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ xarray >= 0.16.0 -dask[array] >= 1.0.0 +dask[array] >= 2.10.0 natsort >= 5.5.0 matplotlib >= 3.1.1 animatplot >= 0.4.1 diff --git a/setup.py b/setup.py index 45affb1f..acff2c5f 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ python_requires='>=3.6', install_requires=[ 'xarray>=0.16.0', - 'dask[array]>=1.0.0', + 'dask[array]>=2.10.0', 'natsort>=5.5.0', 'matplotlib>=3.1.1', 'animatplot>=0.4.1', From ccf1017fd7e8f3169cf6b5c49d3ff5257f8eb943 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 20:03:50 +0100 Subject: [PATCH 84/94] Better fix, BoutDataArray.interpolate_parallel() performance regression Removing attrs from y-coordinate means we do not need to call da.compute(), which would load the entire result into memory. It is better not to, as the result may be sliced or processed somehow later and we don't want to force loading in case the variable is too large to fit in memory. --- xbout/boutdataarray.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index da67b573..46184b54 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -311,10 +311,13 @@ def interpolate_parallel(self, region=None, *, n=None, toroidal_points=None, region.yupper + (ybndry_upper - 0.5)*dy, ny_fine + ybndry_lower + ybndry_upper) - # This prevents da.interp() from being very slow, but don't know why. + # This prevents da.interp() from being very slow. + # Apparently large attrs (i.e. regions) on a coordinate which is passed as an + # argument to dask.array.map_blocks() slow things down, maybe because coordinates + # are numpy arrays, not dask arrays? 
# Slow-down was introduced in d062fa9e75c02fbfdd46e5d1104b9b12f034448f when # _add_attrs_to_var(updated_ds, ycoord) was added in geometries.py - da = da.compute() + da[ycoord].attrs = {} da = da.interp({ycoord: y_fine.data}, assume_sorted=True, method=method, kwargs={'fill_value': 'extrapolate'}) From ff1bc302a138dc4f7eb6f764997fada96d58cbf1 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 20:06:27 +0100 Subject: [PATCH 85/94] Clean up merge - remove duplicated code adding 1d coordinates This was intended to be moved from add_toroidal_geometry_coords() into apply_geometry(), but ended up being added back into add_toroidal_geometry_coords() in a merge. --- xbout/geometries.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index 146742eb..68d3a556 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -210,27 +210,6 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # Change names of dimensions to Orthogonal Toroidal ones ds = ds.rename(y=coordinates['y']) - # Add 1D Orthogonal Toroidal coordinates - # Make index 'x' a coordinate, useful for handling global indexing - nx = ds.dims['x'] - ds = ds.assign_coords(x=np.arange(nx)) - _add_attrs_to_var(ds, 'x') - ny = ds.dims[coordinates['y']] - # dy should always be constant in x, so it is safe to slice to x=0. - # [The y-coordinate has to be a 1d coordinate that labels x-z slices of the grid - # (similarly x-coordinate is 1d coordinate that labels y-z slices and z-coordinate is - # a 1d coordinate that labels x-y slices). A coordinate might have different values - # in disconnected regions, but there are no branch-cuts allowed in the x-direction in - # BOUT++ (at least for the momement), so the y-coordinate has to be 1d and - # single-valued. Therefore similarly dy has to be 1d and single-valued.] 
- # Need drop=True so that the result does not have an x-coordinate value which - # prevents it being added as a coordinate. - dy = ds['dy'].isel(x=0, drop=True) - - # calculate theta at the centre of each cell - theta = dy.cumsum(keep_attrs=True) - dy/2. - ds = ds.assign_coords(**{coordinates['y']: theta}) - # TODO automatically make this coordinate 1D in simplified cases? ds = ds.rename(psixy=coordinates['x']) ds = ds.set_coords(coordinates['x']) @@ -245,16 +224,6 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # If full data (not just grid file) then toroidal dim will be present if 'z' in ds.dims: ds = ds.rename(z=coordinates['z']) - nz = ds.dims[coordinates['z']] - phi0 = 2*np.pi*ds.metadata['ZMIN'] - phi1 = phi0 + nz*ds.metadata['dz'] - if not np.isclose(phi1, 2.*np.pi*ds.metadata['ZMAX'], rtol=1.e-15, atol=0.): - warn(f"Size of toroidal domain as calculated from nz*dz ({phi1 - phi0}) is " - f"not the same as 2pi*(ZMAX - ZMIN) " - f"({2.*np.pi*ds.metadata['ZMAX'] - phi0}): using value from dz") - phi = xr.DataArray(np.linspace(start=phi0, stop=phi1, num=nz, endpoint=False), - dims=coordinates['z']) - ds = ds.assign_coords(**{coordinates['z']: phi}) # Record which dimension 'z' was renamed to. ds.metadata['bout_zdim'] = coordinates['z'] From 66b286a84da16cf2d7bedd39553b4496a7071a44 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Thu, 30 Jul 2020 20:08:06 +0100 Subject: [PATCH 86/94] Update coord attrs in _update_metadata_increased_resolution() The coordinates of a DataArray that has been interpolated will have attrs that are not consistent with the new DataArray. This commit updates _update_metadata_increased_resolution() to also replace the attrs of the DataArray's coords with the attrs of the new DataArray. 
--- xbout/utils.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/xbout/utils.py b/xbout/utils.py index 59960b4e..d190b460 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -87,4 +87,9 @@ def update_ny(name): update_ny('ny_inner') update_ny('MYSUB') + # Update attrs of coordinates to be consistent with da + for coord in da.coords: + da[coord].attrs = {} + _add_attrs_to_var(da, coord) + return da From 629aacf4bbf7c9ae4bbedd9593afad9a4b90389a Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 1 Aug 2020 21:57:14 +0100 Subject: [PATCH 87/94] Add checks for cell_location in toFieldAligned and fromFieldAligned Staggered grid cases are not implemented yet, would need to use zShift_CELL_XLOW or zShift_CELL_YLOW (which may or may not be present in the Dataset, depending on the PhysicsModel). --- xbout/boutdataarray.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 46184b54..4437b490 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -125,6 +125,14 @@ def toFieldAligned(self): if self.data.direction_y != "Standard": raise ValueError("Cannot shift a " + self.direction_y + " type field to " + "field-aligned coordinates") + if not ( + self.data.cell_location == "CELL_CENTRE" + or self.data.cell_location == "CELL_ZLOW" + ): + raise ValueError( + f"toFieldAligned does not support staggered grids yet, but " + f"location is {self.data.cell_location}." + ) result = self._shiftZ(self.data['zShift']) result["direction_y"] = "Aligned" return result @@ -137,6 +145,14 @@ def fromFieldAligned(self): if self.data.direction_y != "Aligned": raise ValueError("Cannot shift a " + self.direction_y + " type field to " + "field-aligned coordinates") + if not ( + self.data.cell_location == "CELL_CENTRE" + or self.data.cell_location == "CELL_ZLOW" + ): + raise ValueError( + f"fromFieldAligned does not support staggered grids yet, but " + f"location is {self.data.cell_location}."
+ ) result = self._shiftZ(-self.data['zShift']) result["direction_y"] = "Standard" return result From 66e6db9f58028abecb63c0451ac653b424551e38 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 1 Aug 2020 22:25:58 +0100 Subject: [PATCH 88/94] Add staggered zShift variables as coordinates if they exist --- xbout/geometries.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/xbout/geometries.py b/xbout/geometries.py index 68d3a556..2c153980 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -240,6 +240,18 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): ds = ds.set_coords('zShift') except KeyError: pass + try: + ds = ds.set_coords('zShift_CELL_XLOW') + except KeyError: + pass + try: + ds = ds.set_coords('zShift_CELL_YLOW') + except KeyError: + pass + try: + ds = ds.set_coords('zShift_CELL_ZLOW') + except KeyError: + pass ds = _create_regions_toroidal(ds) From 9494f2ebd6d0aa8b39ad12f10cdde09af4dceafe Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sat, 1 Aug 2020 22:30:17 +0100 Subject: [PATCH 89/94] set_coords() raises ValueError not KeyError when variable not present --- xbout/geometries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index 2c153980..ad839293 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -238,19 +238,19 @@ def add_toroidal_geometry_coords(ds, *, coordinates=None, grid=None): # Add zShift as a coordinate, so that it gets interpolated along with a variable try: ds = ds.set_coords('zShift') - except KeyError: + except ValueError: pass try: ds = ds.set_coords('zShift_CELL_XLOW') - except KeyError: + except ValueError: pass try: ds = ds.set_coords('zShift_CELL_YLOW') - except KeyError: + except ValueError: pass try: ds = ds.set_coords('zShift_CELL_ZLOW') - except KeyError: + except ValueError: pass ds = _create_regions_toroidal(ds) From 324179aab0ca766286de6363a9c86c3ea4ef066f Mon Sep 17 00:00:00 2001 From: John 
Omotani Date: Sun, 16 Aug 2020 19:50:50 +0100 Subject: [PATCH 90/94] Allow toFieldAligned and fromFieldAligned when no cell_location attr Not all dump files (especially older ones) have cell_location attrs written, so if none is present, assume it's OK to do toFieldAligned and fromFieldAligned with the cell-centre zShift since we cannot check. --- xbout/boutdataarray.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 4437b490..4407a326 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -125,7 +125,7 @@ def toFieldAligned(self): if self.data.direction_y != "Standard": raise ValueError("Cannot shift a " + self.direction_y + " type field to " + "field-aligned coordinates") - if not ( + if hasattr(self.data, "cell_location") and not ( self.data.cell_location == "CELL_CENTRE" or self.data.cell_location == "CELL_ZLOW" ): @@ -145,7 +145,7 @@ def fromFieldAligned(self): if self.data.direction_y != "Aligned": raise ValueError("Cannot shift a " + self.direction_y + " type field to " + "field-aligned coordinates") - if not ( + if hasattr(self.data, "cell_location") and not ( self.data.cell_location == "CELL_CENTRE" or self.data.cell_location == "CELL_ZLOW" ): From af22f10d2b3834ae5ee9603f8e58410f09affc81 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sun, 16 Aug 2020 19:53:04 +0100 Subject: [PATCH 91/94] Add cell_location attr to variables in unit test Datasets --- xbout/tests/test_load.py | 1 + 1 file changed, 1 insertion(+) diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 263d9180..f9bd59ff 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -322,6 +322,7 @@ def create_bout_ds(syn_data_type='random', lengths=(6, 2, 4, 7), num=0, nxpe=1, n = DataArray(data, dims=['t', 'x', 'y', 'z']) for v in [n, T]: v.attrs['direction_y'] = 'Standard' + v.attrs['cell_location'] = 'CELL_CENTRE' ds = Dataset({'n': n, 'T': T}) # BOUT_VERSION needed so that 
we know that number of points in z is MZ, not MZ-1 (as From 585330faceee692ac079f883aaeaf56b363671b8 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Sun, 16 Aug 2020 19:58:04 +0100 Subject: [PATCH 92/94] Remove drop('x') from add_s_alpha_geometry_coords() The index-value coordinates are now added for dimensions without coordinates after the geometry is applied, so no 'x' coordinate has been created to drop. --- xbout/geometries.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/xbout/geometries.py b/xbout/geometries.py index ad839293..9baa4506 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -292,8 +292,6 @@ def add_s_alpha_geometry_coords(ds, *, coordinates=None, grid=None): "geometry='s-alpha'") ds['r'] = ds['hthe'].isel({ycoord: 0}).squeeze(drop=True) ds['r'].attrs['units'] = 'm' - # remove x-index coordinate, don't need when we have 'r' as a radial coordinate - ds = ds.drop('x') ds = ds.set_coords('r') ds = ds.rename(x='r') ds.metadata['bout_xdim'] = 'r' From 31debc010987be7db04e28cdd492c94c95f61981 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 17 Aug 2020 19:49:23 +0100 Subject: [PATCH 93/94] Drop cell_location attr from Dataset in BoutDataArray.to_dataset() cell_location attribute only makes sense for a DataArray not a whole Dataset, so remove in to_dataset() method. --- xbout/boutdataarray.py | 1 + 1 file changed, 1 insertion(+) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 4407a326..1368c630 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -62,6 +62,7 @@ def dropIfExists(ds, name): dropIfExists(ds, 'direction_y') dropIfExists(ds, 'direction_z') + dropIfExists(ds, 'cell_location') return ds From 8e3e14af620e2a3715185edfa403d3fe82921174 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Mon, 17 Aug 2020 22:16:13 +0100 Subject: [PATCH 94/94] Add fsspec to PIP_PACKAGES in minimum versions Travis test Seems to be required at the moment to avoid an import error in the minimum-package-versions test. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 1a042ff6..9df27765 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ python: - "3.7" env: - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray!=0.14.0 numpy>=1.16.0" - - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.16.0 dask==2.10.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. + - PIP_PACKAGES="setuptools pip pytest pytest-cov coverage codecov boutdata xarray==0.16.0 dask==2.10.0 numpy==1.16.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.1 netcdf4==1.4.2 Pillow==6.1.0 fsspec" # test with oldest supported version of packages. Note, using numpy==1.16.0 as a workaround for some weird fails on Travis, in principle we should work with numpy>=1.13.3. We should not need to install fsspec explicitly, but at the moment are getting import errors in the tests due to fsspec not being present - should remove in future, probably when dask version is increased. install: - pip install --upgrade ${PIP_PACKAGES} - pip install -r requirements.txt