diff --git a/ci/environment.yml b/ci/environment.yml index 4bf29255..a565fa17 100644 --- a/ci/environment.yml +++ b/ci/environment.yml @@ -9,6 +9,7 @@ dependencies: - xarray - xoak - cartopy +- scipy < 1.11 - intake-xarray - geopy - xesmf > 0.6.3 diff --git a/oceanspy/llc_rearrange.py b/oceanspy/llc_rearrange.py index 75f7408c..b2b955c1 100644 --- a/oceanspy/llc_rearrange.py +++ b/oceanspy/llc_rearrange.py @@ -892,8 +892,6 @@ def flip_v(_ds, co_list=metrics, dims=True, _len=3): _ds[_varName] = -_ds[_varName] elif _varName == "SN": _ds[_varName] = -_ds[_varName] - # elif _varName == "CS": - # _ds[_varName] = -_ds[_varName] return _ds @@ -1232,6 +1230,256 @@ def llc_local_to_lat_lon(ds, co_list=metrics): return _ds +def arctic_eval(_ds, _ix, _iy, _dim_name="mooring"): + """ + Evaluates a dataset + at arctic face. _ix and _iy are vectors of index points, associated with + different locations around the face=6. + """ + + _ds = mates(_ds) + + y = DataArray( + _np.arange(1), + dims=("y"), + attrs={"long_name": "j-index of cell center", "units": "none"}, + ) + x = DataArray( + _np.arange(1), + dims=("x"), + attrs={"long_name": "i-index of cell center", "units": "none"}, + ) + yp1 = DataArray( + _np.arange(2), + dims=("yp1"), + attrs={"long_name": "j-index of cell corner", "units": "none"}, + ) + xp1 = DataArray( + _np.arange(2), + dims=("xp1"), + attrs={"long_name": "i-index of cell corner", "units": "none"}, + ) + + _XC = _ds["XC"].isel(face=6) + XR5 = _np.min(_XC.isel(Y=0, X=0).values), _np.max(_XC.isel(Y=0, X=-1).values) + XR2 = _np.min(_XC.isel(X=0, Y=-1).values), _np.max(_XC.isel(X=0, Y=0).values) + XR7 = _np.min(_XC.isel(X=-1, Y=0).values), _np.max(_XC.isel(X=-1, Y=-1).values) + XR10 = _np.min(_XC.isel(X=-1, Y=-1).values), _np.max(_XC.isel(X=0, Y=-1).values) + DS = [] + co_list = [var for var in _ds.coords] + + for i in range(len(_ix)): + _ix0, _iy0 = _np.array([_ix[i]]), _np.array([_iy[i]]) + p = _XC.isel(X=_ix[i], Y=_iy[i]).values + if p > XR2[0] and p < XR2[-1]: + new_dim = DataArray( + _np.arange(len(_ix0)), + dims=(_dim_name), + attrs={"long_name": "index of " + _dim_name, "units": "none"}, + ) + + # Transform indexes in DataArray + iY = DataArray( + _np.reshape(_iy0, (len(new_dim), len(y))), + coords={_dim_name: new_dim, "y": y}, + dims=(_dim_name, "y"), + ) + iX = DataArray( + _np.reshape(_ix0, (len(new_dim), len(x))), + coords={_dim_name: new_dim, "x": x}, + dims=(_dim_name, "x"), + ) + + iYp1 = DataArray( + _np.stack((_iy0, _iy0 + 1)[::-1], 1), + coords={_dim_name: new_dim, "yp1": yp1}, + dims=(_dim_name, "yp1"), + ) + iXp1 = DataArray( + _np.stack((_ix0, _ix0 + 1), 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + args = { + "X": iX, + "Y": iY, + "Xp1": iXp1, + "Yp1": iYp1, + "face": 6, + } + rename = {"yp1": "Xp1", "xp1": "Yp1", "x": "Y", "y": "X"} + new_ds = _ds.isel(**args).drop_vars(["Xp1", "Yp1", "X", "Y"]) + new_ds = new_ds.rename_dims(rename).rename_vars(rename) + new_ds = rotate_vars(new_ds) + + for _varName in new_ds.variables: + if "mate" in new_ds[_varName].attrs: + _dims = new_ds[_varName].dims + if _varName not in co_list and "Xp1" in _dims: + new_ds[_varName] = -new_ds[_varName] + if _varName == "SN": + new_ds[_varName] = -new_ds[_varName] + + elif p > XR5[0] and p < XR5[-1]: + new_dim = DataArray( + _np.arange(len(_ix0)), + dims=(_dim_name), + attrs={"long_name": "index of " + _dim_name, "units": "none"}, + ) + + # Transform indexes in DataArray + iY = DataArray( + _np.reshape(_iy0, (len(new_dim), len(y))), + coords={_dim_name: new_dim, 
"y": y}, + dims=(_dim_name, "y"), + ) + iX = DataArray( + _np.reshape(_ix0, (len(new_dim), len(x))), + coords={_dim_name: new_dim, "x": x}, + dims=(_dim_name, "x"), + ) + + iYp1 = DataArray( + _np.stack((_iy0, _iy0 + 1), 1), + coords={_dim_name: new_dim, "yp1": yp1}, + dims=(_dim_name, "yp1"), + ) + iXp1 = DataArray( + _np.stack((_ix0, _ix0 + 1), 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + args = { + "X": iX, + "Y": iY, + "Xp1": iXp1, + "Yp1": iYp1, + "face": 6, + } + rename = {"yp1": "Yp1", "xp1": "Xp1", "x": "X", "y": "Y"} + + new_ds = _ds.isel(**args).drop_vars(["Xp1", "Yp1", "X", "Y"]) + new_ds = new_ds.rename_dims(rename).rename_vars(rename) + + elif p > XR7[0] or p < XR7[-1]: + new_dim = DataArray( + _np.arange(len(_ix0)), + dims=(_dim_name), + attrs={"long_name": "index of " + _dim_name, "units": "none"}, + ) + + # Transform indexes in DataArray + iY = DataArray( + _np.reshape(_iy0, (len(new_dim), len(y))), + coords={_dim_name: new_dim, "y": y}, + dims=(_dim_name, "y"), + ) + iX = DataArray( + _np.reshape(_ix0, (len(new_dim), len(x))), + coords={_dim_name: new_dim, "x": x}, + dims=(_dim_name, "x"), + ) + + iYp1 = DataArray( + _np.stack((_iy0, _iy0 + 1), 1), + coords={_dim_name: new_dim, "yp1": yp1}, + dims=(_dim_name, "yp1"), + ) + + iXp1 = DataArray( + _np.stack((_ix0, _ix0 + 1)[::-1], 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + args = { + "X": iX, + "Y": iY, + "Xp1": iXp1, + "Yp1": iYp1, + "face": 6, + } + rename = {"yp1": "Xp1", "xp1": "Yp1", "x": "Y", "y": "X"} + new_ds = _ds.isel(**args).drop_vars(["Xp1", "Yp1", "X", "Y"]) + new_ds = new_ds.rename_dims(rename).rename_vars(rename) + new_ds = rotate_vars(new_ds) + + for _varName in new_ds.variables: + if "mate" in new_ds[_varName].attrs: + _dims = new_ds[_varName].dims + if _varName not in co_list and "Yp1" in _dims: + new_ds[_varName] = -new_ds[_varName] + if _varName == "CS": + new_ds[_varName] = -new_ds[_varName] + + elif p > XR10[0] and p < XR10[-1]: + new_dim = DataArray( + _np.arange(len(_ix0)), + dims=(_dim_name), + attrs={"long_name": "index of " + _dim_name, "units": "none"}, + ) + + # Transform indexes in DataArray + iY = DataArray( + _np.reshape(_iy0, (len(new_dim), len(y))), + coords={_dim_name: new_dim, "y": y}, + dims=(_dim_name, "y"), + ) + iX = DataArray( + _np.reshape(_ix0, (len(new_dim), len(x))), + coords={_dim_name: new_dim, "x": x}, + dims=(_dim_name, "x"), + ) + + iYp1 = DataArray( + _np.stack((_iy0, _iy0 + 1)[::-1], 1), + coords={_dim_name: new_dim, "yp1": yp1}, + dims=(_dim_name, "yp1"), + ) + + iXp1 = DataArray( + _np.stack((_ix0, _ix0 + 1)[::-1], 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + args = { + "X": iX, + "Y": iY, + "Xp1": iXp1, + "Yp1": iYp1, + "face": 6, + } + rename = {"yp1": "Yp1", "xp1": "Xp1", "x": "X", "y": "Y"} + + new_ds = _ds.isel(**args).drop_vars(["Xp1", "Yp1", "X", "Y"]) + new_ds = new_ds.rename_dims(rename).rename_vars(rename) + + for _varName in new_ds.variables: + if "mate" in new_ds[_varName].attrs: + _dims = new_ds[_varName].dims + if _varName not in co_list and ("Yp1" in _dims or "Xp1" in _dims): + new_ds[_varName] = -new_ds[_varName] + if _varName == "SN": + new_ds[_varName] = -new_ds[_varName] + if _varName == "CS": + new_ds[_varName] = -new_ds[_varName] + + DS.append(new_ds) + dsf = DS[0].reset_coords() + if len(DS) > 1: + for i in range(1, len(DS)): + nmds = reset_dim(DS[i], i, dim="station") + dsf = dsf.combine_first(nmds.reset_coords()) + return 
dsf.set_coords(co_list) + + +def reset_dim(_ds, N, dim="mooring"): + """resets the dimension mooring by shifting it by a value set by N""" + _ds["n" + dim] = N + _ds[dim] + _ds = _ds.swap_dims({dim: "n" + dim}).drop_vars(dim).rename({"n" + dim: dim}) + + return _ds + + class Dims: """Creates a shortcut for dimension`s names associated with an arbitrary variable.""" diff --git a/oceanspy/plot.py b/oceanspy/plot.py index f9eb19d2..787b65b3 100644 --- a/oceanspy/plot.py +++ b/oceanspy/plot.py @@ -32,6 +32,7 @@ from .compute import _add_missing_variables from .compute import integral as _integral from .compute import weighted_mean as _weighted_mean +from .llc_rearrange import Dims # Additional dependencies (private) try: @@ -651,6 +652,20 @@ def horizontal_section( col_wrap = kwargs.pop("col_wrap", None) subplot_kws = kwargs.pop("subplot_kws", None) transform = kwargs.pop("transform", None) + xstep, ystep = kwargs.pop("xstep", None), kwargs.pop("ystep", None) + + DIMS = [dim for dim in da.dims if dim[0] in ["X", "Y"]] + dims_var = Dims(DIMS[::-1]) + + if xstep is not None and ystep is not None: + xslice = slice(0, len(da[dims_var.X]), xstep) + yslice = slice(0, len(da[dims_var.Y]), ystep) + else: + xslice = slice(0, len(da[dims_var.X])) + yslice = slice(0, len(da[dims_var.Y])) + + sargs = {dims_var.X: xslice, dims_var.Y: yslice} + da = da.isel(**sargs) # Projection if ax is None: @@ -937,6 +952,18 @@ def vertical_section( ver_name = [dim for dim in od.grid_coords["Z"] if dim in da.dims][0] da = da.squeeze() + # slicing along section + step = kwargs.pop("step", None) + DIMS = [dim for dim in da.dims] + dims_var = Dims(DIMS[::-1]) + if step is not None and dims_var.X in ["mooring", "station"]: + xslice = slice(0, len(da[dims_var.X]), step) + else: + xslice = slice(0, len(da[dims_var.X])) + + sargs = {dims_var.X: xslice} + da = da.isel(**sargs) + # CONTOURNAME if contourName is not None: # Apply mean and sum diff --git a/oceanspy/subsample.py b/oceanspy/subsample.py index 032593f8..95a5290c 100644 --- a/oceanspy/subsample.py +++ b/oceanspy/subsample.py @@ -35,6 +35,7 @@ _rename_aliased, ) from .llc_rearrange import LLCtransformation as _llc_trans +from .llc_rearrange import arctic_eval, reset_dim, rotate_vars from .utils import _rel_lon, _reset_range, circle_path_array, get_maskH # Recommended dependencies (private) @@ -782,64 +783,9 @@ def remove_repeated(_iX, _iY): # attempt to remove repeated (but not adjacent) coord values ix, iy = remove_repeated(ix, iy) - mooring = DataArray( - _np.arange(len(ix)), - dims=("mooring"), - attrs={"long_name": "index of mooring", "units": "none"}, - ) - y = DataArray( - _np.arange(1), - dims=("y"), - attrs={"long_name": "j-index of cell center", "units": "none"}, - ) - x = DataArray( - _np.arange(1), - dims=("x"), - attrs={"long_name": "i-index of cell corner", "units": "none"}, - ) - yp1 = DataArray( - _np.arange(2), - dims=("yp1"), - attrs={"long_name": "j-index of cell center", "units": "none"}, - ) - xp1 = DataArray( - _np.arange(2), - dims=("xp1"), - attrs={"long_name": "i-index of cell corner", "units": "none"}, - ) + new_ds = eval_dataset(ds, ix, iy) - # Transform indexes in DataArray - iY = DataArray( - _np.reshape(iy, (len(mooring), len(y))), - coords={"mooring": mooring, "y": y}, - dims=("mooring", "y"), - ) - iX = DataArray( - _np.reshape(ix, (len(mooring), len(x))), - coords={"mooring": mooring, "x": x}, - dims=("mooring", "x"), - ) - iYp1 = DataArray( - _np.stack((iy, iy + 1), 1), - coords={"mooring": mooring, "yp1": yp1}, - dims=("mooring", 
"yp1"), - ) - iXp1 = DataArray( - _np.stack((ix, ix + 1), 1), - coords={"mooring": mooring, "xp1": xp1}, - dims=("mooring", "xp1"), - ) - - args = { - "X": iX, - "Y": iY, - "Xp1": iXp1, - "Yp1": iYp1, - } - - rename = {"x": "X", "y": "Y", "yp1": "Yp1", "xp1": "Xp1"} - new_ds = ds.isel(**args).drop_vars(["X", "Y", "Xp1", "Yp1"]) - new_ds = new_ds.rename_dims(rename).rename_vars(rename) + mooring = new_ds.mooring near_Y = new_ds["YC"].compute().data.squeeze() near_X = new_ds["XC"].compute().data.squeeze() @@ -1130,6 +1076,176 @@ def survey_stations( return od +def stations( + od, + varList=None, + tcoords=None, + Zcoords=None, + Ycoords=None, + Xcoords=None, + xoak_index="scipy_kdtree", + method="nearest", +): + """ + Extract stations using nearest-neighbor lookup. + + Parameters + ---------- + od: OceanDataset + od that will be subsampled. + tcoords: 1D array_like, NoneType + time-coordinates (datetime). + Zcoords: 1D array_like, NoneType + Z coordinates at center point + Ycoords: 1D array_like, NoneType + Latitude coordinates of locations at center point. + Xcoords: 1D array_like, NoneType + lon coordinates of locations at center point. + xoak_index: str + xoak index to be used. `scipy_kdtree` by default. + + Returns + ------- + od: OceanDataset + Subsampled oceandataset. + + See Also + -------- + oceanspy.OceanDataset.mooring + + """ + _check_native_grid(od, "stations") + + # Convert variables to numpy arrays and make some check + tcoords = _check_range(od, tcoords, "timeRange") + Zcoords = _check_range(od, Zcoords, "Zcoords") + Ycoords = _check_range(od, Ycoords, "Ycoords") + Xcoords = _check_range(od, Xcoords, "Xcoords") + + # Message + print("Extracting stations.") + + # Unpack ds + od = _copy.copy(od) + ds = od._ds + + if varList is not None: + nvarlist = [var for var in ds.data_vars if var not in varList] + ds = ds.drop_vars(nvarlist) + + # look up nearest neighbors in Z and time dims + Zlist = ["Zl", "Z", "Zp1", "Zu"] + tlist = ["time", "time_midp"] + dimlist, Coords = [], [] + if Zcoords is not None: + dimlist.append(Zlist) + Coords.append(Zcoords) + if tcoords is not None: + dimlist.append(tlist) + Coords.append(tcoords) + + for i in range(len(dimlist)): + List = [k for k in dimlist[i] if k in ds.dims] + args = {} + for item in List: + if len(ds[item]) > 0: + args[item] = Coords[i] + ds = ds.sel(**args, method="nearest") + + # create list of coordinates. + co_list = [var for var in ds.coords if var not in ["face"]] + + if Xcoords is None and Ycoords is None: + DS = ds + + if Xcoords is not None and Ycoords is not None: + if not ds.xoak.index: + if xoak_index not in _xoak.IndexRegistry(): + raise ValueError( + "`xoak_index` [{}] is not supported." + "\nAvailable options: {}" + "".format(xoak_index, _xoak.IndexRegistry()) + ) + + ds.xoak.set_index(["XC", "YC"], xoak_index) + + cdata = {"XC": ("station", Xcoords), "YC": ("station", Ycoords)} + ds_data = _xr.Dataset(cdata) + + # find nearest points to given data. 
+ nds = ds.xoak.sel(XC=ds_data["XC"], YC=ds_data["YC"]) + + if "face" not in ds.dims: + iX, iY = (nds[f"{i}"].data for i in ("X", "Y")) + DS = eval_dataset(ds, iX, iY, _dim_name="station") + DS = DS.squeeze() + + if "face" in ds.dims: + iX, iY, iface = (nds[f"{i}"].data for i in ("X", "Y", "face")) + + _dat = nds.face.values + ll = _np.where(abs(_np.diff(_dat)))[0] + order_iface = [_dat[i] for i in ll] + [_dat[-1]] + Niter = len(order_iface) + + if Niter == 1: + X0, Y0 = iX, iY + DS = eval_dataset(ds, X0, Y0, order_iface, "station") + DS = DS.squeeze() + + else: + # split indexes along each face + X0, Y0 = [], [] + for ii in range(len(ll) + 1): + if ii == 0: + x0, y0 = iX[: ll[ii] + 1], iY[: ll[ii] + 1] + elif ii > 0 and ii < len(ll): + x0, y0 = ( + iX[ll[ii - 1] + 1 : ll[ii] + 1], + iY[ll[ii - 1] + 1 : ll[ii] + 1], + ) + elif ii == len(ll): + x0, y0 = iX[ll[ii - 1] + 1 :], iY[ll[ii - 1] + 1 :] + X0.append(x0) + Y0.append(y0) + + DS = [] + for i in range(Niter): + DS.append(eval_dataset(ds, X0[i], Y0[i], order_iface[i], "station")) + + _dim = "station" + nDS = [DS[0].reset_coords()] + for i in range(1, len(DS)): + Nend = nDS[i - 1][_dim].values[-1] + nDS.append(reset_dim(DS[i], Nend + 1, dim=_dim).reset_coords()) + + DS = nDS[0] + for i in range(1, len(nDS)): + DS = DS.combine_first(nDS[i]) + + DS = DS.set_coords(co_list) + + if Xcoords is None and Ycoords is None: + od._ds = DS + + if Xcoords is not None and Ycoords is not None: + if "face" in DS.variables: + DS = DS.drop_vars(["face"]) + + od._ds = DS + + if od.face_connections is not None: + new_face_connections = {"face_connections": {None: {None, None}}} + od = od.set_face_connections(**new_face_connections) + + grid_coords = od.grid_coords + grid_coords.pop("X", None) + grid_coords.pop("Y", None) + od = od.set_grid_coords(grid_coords, overwrite=True) + + return od + + def particle_properties(od, times, Ypart, Xpart, Zpart, **kwargs): """ Extract Eulerian properties of particles @@ -1307,6 +1423,113 @@ def particle_properties(od, times, Ypart, Xpart, Zpart, **kwargs): return od +def eval_dataset(_ds, _ix, _iy, _iface=None, _dim_name="mooring"): + """ + Evaluates a dataset along (spatial) trajectory in the plane as defined by the + indexes in the plane. As a result, there is a new dimension/coordinate, hence + reducing the dimension of the original dataset. + + Parameters: + ---------- + _ds: xarray.Dataset + contains all x, y coordinates (but may be subsampled in Z or time) + _ix, _iy: 1D array, int + index values identifying the location in X Y (lat, lon) space + _iface: int, None (bool) + None (default) implies no complex topology in the dataset. Otherwise, + _iface indicates the face index which, along which the provided ix, iy, + identify the spatial (geo) coordinate location in lat/lon space. + _dim_name: str + names the new dimension along the pathway. By default this is 'mooring', + but can also be 'station' (when discrete, argo-like isolated coordinates). 
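+
+    Notes:
+    -----
+        For _iface in 7-12 (the rotated faces of the llc grid) the x/y and
+        xp1/yp1 dimensions swap roles when renamed to X/Y and Xp1/Yp1, and
+        vector variables are corrected with rotate_vars; the Arctic face
+        (_iface=6) is handled separately by arctic_eval.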
+ + Returns: + xarray.Dataset + + """ + + new_dim = DataArray( + _np.arange(len(_ix)), + dims=(_dim_name), + attrs={"long_name": "index of " + _dim_name, "units": "none"}, + ) + y = DataArray( + _np.arange(1), + dims=("y"), + attrs={"long_name": "j-index of cell center", "units": "none"}, + ) + x = DataArray( + _np.arange(1), + dims=("x"), + attrs={"long_name": "i-index of cell center", "units": "none"}, + ) + yp1 = DataArray( + _np.arange(2), + dims=("yp1"), + attrs={"long_name": "j-index of cell corner", "units": "none"}, + ) + xp1 = DataArray( + _np.arange(2), + dims=("xp1"), + attrs={"long_name": "i-index of cell corner", "units": "none"}, + ) + + # Transform indexes in DataArray + iY = DataArray( + _np.reshape(_iy, (len(new_dim), len(y))), + coords={_dim_name: new_dim, "y": y}, + dims=(_dim_name, "y"), + ) + iX = DataArray( + _np.reshape(_ix, (len(new_dim), len(x))), + coords={_dim_name: new_dim, "x": x}, + dims=(_dim_name, "x"), + ) + + iYp1 = DataArray( + _np.stack((_iy, _iy + 1), 1), + coords={_dim_name: new_dim, "yp1": yp1}, + dims=(_dim_name, "yp1"), + ) + + iXp1 = DataArray( + _np.stack((_ix, _ix + 1), 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + + if _iface is not None: + if _iface == [6]: + return arctic_eval(_ds, _ix, _iy, _dim_name) + elif _iface in _np.arange(7, 13): + iXp1 = DataArray( + _np.stack((_ix + 1, _ix), 1), + coords={_dim_name: new_dim, "xp1": xp1}, + dims=(_dim_name, "xp1"), + ) + + args = { + "X": iX, + "Y": iY, + "Xp1": iXp1, + "Yp1": iYp1, + } + + rename = {"yp1": "Yp1", "xp1": "Xp1", "x": "X", "y": "Y"} + + if _iface is not None: + args = {"face": _iface, **args} + if _iface in _np.arange(7, 13): + rename = {"yp1": "Xp1", "xp1": "Yp1", "x": "Y", "y": "X"} + + new_ds = _ds.isel(**args).drop_vars(["Xp1", "Yp1", "X", "Y"]) + new_ds = new_ds.rename_dims(rename).rename_vars(rename) + if _iface is not None and _iface in _np.arange(7, 13): + new_ds = rotate_vars(new_ds) + + return new_ds + + class _subsampleMethods(object): """ Enables use of functions as OceanDataset attributes. 
@@ -1327,6 +1550,10 @@ def mooring_array(self, **kwargs): def survey_stations(self, **kwargs): return survey_stations(self._od, **kwargs) + @_functools.wraps(stations) + def stations(self, **kwargs): + return stations(self._od, **kwargs) + @_functools.wraps(particle_properties) def particle_properties(self, **kwargs): return particle_properties(self._od, **kwargs) diff --git a/oceanspy/tests/test_plot.py b/oceanspy/tests/test_plot.py index ba85a319..f4e87ced 100644 --- a/oceanspy/tests/test_plot.py +++ b/oceanspy/tests/test_plot.py @@ -231,7 +231,8 @@ def test_hor_sec_warn(od_in): @pytest.mark.parametrize("od_in", [od]) @pytest.mark.parametrize("varName", ["Temp", "U", "V", "momVort3"]) @pytest.mark.parametrize("contourName", ["Depth", "U", "V", "momVort3"]) -def test_hor_sec(od_in, varName, contourName): +@pytest.mark.parametrize("step", [None, 2]) +def test_hor_sec(od_in, varName, contourName, step): plt.close() cutout_kwargs = { "timeRange": [od_in.dataset["time"][0].values, od_in.dataset["time"][-1].values] @@ -250,6 +251,8 @@ def test_hor_sec(od_in, varName, contourName): contour_kwargs=contour_kwargs, clabel_kwargs=clabel_kwargs, cutout_kwargs=cutout_kwargs, + xstep=step, + ystep=step, ) assert isinstance(ax, plt.Axes) @@ -328,7 +331,8 @@ def test_ver_sec_subsamp(od_in, subsampMethod): @pytest.mark.parametrize("od_in", [od_moor, od_surv]) @pytest.mark.parametrize("varName", ["Temp", "U", "V", "W", "momVort3"]) @pytest.mark.parametrize("contourName", ["Temp", "U", "V", "W", "momVort3"]) -def test_ver_sec(od_in, varName, contourName): +@pytest.mark.parametrize("step", [None, 2]) +def test_ver_sec(od_in, varName, contourName, step): plt.close() if "mooring_dist" in od_in.dataset.variables: ds = od_in.dataset.drop_vars("mooring_dist") @@ -351,6 +355,7 @@ def test_ver_sec(od_in, varName, contourName): intAxes=intAxes, contour_kwargs=contour_kwargs, clabel_kwargs=clabel_kwargs, + step=step, ) assert isinstance(ax, plt.Axes) diff --git a/oceanspy/tests/test_subsample.py b/oceanspy/tests/test_subsample.py index c8b91b77..09df0869 100644 --- a/oceanspy/tests/test_subsample.py +++ b/oceanspy/tests/test_subsample.py @@ -1,4 +1,6 @@ # TODO: cartesian, and Xp1 Yp1 right are not tested. 
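+# copy and llc_rearrange.mates are imported for the stations tests further
+# down: they build a synthetic velocity field (UVELMASS = CS, VVELMASS = -SN)
+# that is, by construction, close to uniformly eastward (~1 m/s) in geographic
+# coordinates, so the extracted U/V components at each station have known
+# magnitudes.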
+import copy as _copy + import numpy as np import pytest import xarray as xr @@ -6,6 +8,7 @@ # From OceanSpy from oceanspy import OceanDataset, open_oceandataset +from oceanspy.llc_rearrange import mates # Directory Datadir = "./oceanspy/tests/Data/" @@ -424,6 +427,80 @@ def test_survey(od, cartesian, delta, kwargs): new_od.grid +# ======== +# STATIONS +# ======== +# create cyclonic vel +nU = _copy.deepcopy(ECCOod._ds["CS"].values) +nV = -_copy.deepcopy(ECCOod._ds["SN"].values) +Ucoords = { + "face": ECCOod._ds.face.values, + "Y": ECCOod._ds.Y.values, + "Xp1": ECCOod._ds.Xp1.values, +} +Vcoords = { + "face": ECCOod._ds.face.values, + "Yp1": ECCOod._ds.Yp1.values, + "X": ECCOod._ds.X.values, +} +ECCOod._ds["UVELMASS"] = xr.DataArray(nU, coords=Ucoords, dims=["face", "Y", "Xp1"]) +ECCOod._ds["VVELMASS"] = xr.DataArray(nV, coords=Vcoords, dims=["face", "Yp1", "X"]) + +ECCOod._ds = mates(ECCOod._ds) + +# coordinate locations +lons76N = np.array([6.2, 97.8, -172.21623, -83.78377]) +lats76N = 76 * np.ones(np.shape(lons76N)) + +lats_6E = np.array([60.131752, 65.39574, 70.104706, 74.43877, 78.69695, 82.68835]) +lons6E = 6 * np.ones(np.shape(lats_6E)) + +lons90W = -87.5 * np.ones(np.shape(lats_6E)) + +lons170W = -170 * np.ones(np.shape(lats_6E)) + + +@pytest.mark.parametrize("this_od", [ECCOod]) +@pytest.mark.parametrize( + "args", + [ + {"Ycoords": None, "Xcoords": None}, + {"Ycoords": lats76N, "Xcoords": lons76N}, + {"Ycoords": lats_6E, "Xcoords": lons6E}, + {"Ycoords": lats_6E, "Xcoords": lons90W}, + {"Ycoords": lats_6E, "Xcoords": lons170W}, + ], +) +def test_stations(this_od, args): + od_stns = this_od.subsample.stations(**args) + + if args["Ycoords"] is None or args["Xcoords"] is None: + assert this_od._ds.XC.shape == od_stns._ds.XC.shape + assert this_od._ds.YC.shape == od_stns._ds.YC.shape + else: + YC, XC = args["Ycoords"], args["Xcoords"] + XCstn, YCstn = od_stns._ds.XC, od_stns._ds.YC + XGstn, YGstn = od_stns._ds.XG, od_stns._ds.YG + stations = od_stns._ds.station.values + argsu0, argsu1 = {"Xp1": 0}, {"Xp1": 1} + argsv0, argsv1 = {"Yp1": 0}, {"Yp1": 1} + assert len(stations) == len(XC) == len(YC) + for i in range(len(stations)): + assert XGstn.isel(station=i, Xp1=0, Yp1=0).values < XCstn.isel(station=i) + assert XGstn.isel(station=i, Xp1=1, Yp1=0).values > XCstn.isel(station=i) + assert YGstn.isel(station=i, Xp1=0, Yp1=0).values < YCstn.isel(station=i) + assert YGstn.isel(station=i, Xp1=0, Yp1=1).values > YCstn.isel(station=i) + + Uval0 = od_stns._ds["UVELMASS"].isel(**{**{"station": i}, **argsu0}).values + Uval1 = od_stns._ds["UVELMASS"].isel(**{**{"station": i}, **argsu1}).values + Vval0 = od_stns._ds["VVELMASS"].isel(**{**{"station": i}, **argsv0}).values + Vval1 = od_stns._ds["VVELMASS"].isel(**{**{"station": i}, **argsv1}).values + + assert np.round(abs(Uval0), 1) == np.round(abs(Uval1), 1) == 1 + assert np.round(abs(Vval0), 1) <= 0.1 + assert np.round(abs(Vval1), 1) <= 0.1 + + # ========= # PARTICLES # ========= diff --git a/oceanspy/utils.py b/oceanspy/utils.py index 2b5b8a77..2d52d0fb 100644 --- a/oceanspy/utils.py +++ b/oceanspy/utils.py @@ -70,20 +70,18 @@ def viewer_to_range(p): if p_type == "Polygon": coords = p[0]["coordinates"][0] elif p_type == "Point": - coords = p[0]["coordinates"] + coords = [] + for i in range(len(p)): + coords.append(p[i]["coordinates"]) elif p_type == "LineString": coords = p[0]["coordinates"] lon = [] lat = [] - if p_type == "Point": - lon.append(coords[0]) - lat.append(coords[1]) - else: - for i in range(len(coords)): - lon.append(coords[i][0]) - 
lat.append(coords[i][1]) + for i in range(len(coords)): + lon.append(coords[i][0]) + lat.append(coords[i][1]) # check that there are no lon values greater than 180 (abs) ll = _np.where(abs(_np.array(lon)) > 180)[0]
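
A minimal sketch of how the new stations subsampling and the plotting step keywords added above might be combined. The catalog names "ECCO" and "get_started", the variable "Temp", and the coordinate values are illustrative assumptions; from_catalog, mooring_array and meanAxes belong to the existing OceanSpy API and are not touched by this diff.

import numpy as np

import oceanspy as ospy

# llc-grid dataset for station extraction (catalog name is an assumption)
od_llc = ospy.open_oceandataset.from_catalog("ECCO")

# nearest-neighbour station extraction at four longitudes along 76N
lons = np.array([6.2, 97.8, -172.2, -83.8])
lats = 76 * np.ones(lons.shape)
od_stn = od_llc.subsample.stations(Ycoords=lats, Xcoords=lons, varList=["Temp"])

# regular-grid dataset for the plotting examples (catalog name is an assumption)
od = ospy.open_oceandataset.from_catalog("get_started")

# horizontal sections can now be thinned with xstep/ystep
ospy.plot.horizontal_section(od, varName="Temp", meanAxes=True, xstep=2, ystep=2)

# vertical sections along a mooring array accept a step along the section
od_moor = od.subsample.mooring_array(Ymoor=[69.5, 70.0, 70.5], Xmoor=[-21, -18, -15])
ospy.plot.vertical_section(od_moor, varName="Temp", meanAxes=True, step=2)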