Update DevOps to cache conda and fix attributes not being preserved with `xarray > 2023.3.0` (#465)
tomvothecoder authored May 11, 2023
1 parent cbef082 commit b896c8a
Showing 13 changed files with 285 additions and 931 deletions.
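The second half of the commit title refers to metadata attributes no longer being preserved with `xarray > 2023.3.0`; the fix itself lives mostly in changed files not shown in this excerpt. Purely as an illustrative sketch of the general mechanism (not the commit's actual change, and using a made-up DataArray), attributes can be kept either by opting in with xarray's set_options or by re-attaching them explicitly:

import numpy as np
import xarray as xr

# Illustrative only -- not the fix applied in this commit.
da = xr.DataArray(np.arange(3.0), dims="x", attrs={"units": "K"})

# Opt in to attribute propagation for operations inside the context manager.
with xr.set_options(keep_attrs=True):
    result = da + 1.0
assert result.attrs == {"units": "K"}

# Or re-attach the original attributes explicitly after an operation.
result = da + 1.0
result.attrs = dict(da.attrs)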
40 changes: 26 additions & 14 deletions .github/workflows/build_workflow.yml
@@ -62,32 +62,44 @@ jobs:
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
uses: actions/checkout@v3

- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
name: Cache Conda
uses: actions/cache@v3
env:
# Increase this value to reset cache if conda-env/ci.yml has not changed in the workflow
CACHE_NUMBER: 0
with:
path: ~/conda_pkgs_dir
key: ${{ runner.os }}-${{ matrix.python-version }}-conda-${{ env.CACHE_NUMBER }}

- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
name: Set up Conda Environment
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: "xcdat_ci"
miniforge-variant: Mambaforge
miniforge-version: latest
activate-environment: "xcdat_ci"
use-mamba: true
mamba-version: "*"
environment-file: conda-env/ci.yml
channel-priority: strict
auto-update-conda: true
# IMPORTANT: This needs to be set for caching to work properly!
use-only-tar-bz2: true
python-version: ${{ matrix.python-version }}

# Refresh the cache every 24 hours to avoid inconsistencies of package versions
# between the CI pipeline and local installations.
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
id: get-date
name: Get Date
run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
shell: bash

- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
id: cache
name: Cache Conda env
uses: actions/cache@v3
with:
path: ${{ env.CONDA }}/envs
key:
conda-${{ runner.os }}-${{ runner.arch }}-${{ matrix.python-version }}-${{
steps.get-date.outputs.today }}-${{hashFiles('conda-env/ci.yml') }}-${{ env.CACHE_NUMBER}}
env:
# Increase this value to reset cache if conda-env/ci.yml has not changed in the workflow
CACHE_NUMBER: 0

- if: ${{ steps.skip_check.outputs.should_skip != 'true' && steps.cache.outputs.cache-hit != 'true' }}
name: Update environment
run: mamba env update -n xcdat_ci -f conda-env/ci.yml

- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
name: Install xcdat
# Source: https://github.com/conda/conda-build/issues/4251#issuecomment-1053460542
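The new steps above cache the built conda environment under $CONDA/envs and key it on the runner OS and architecture, the Python version, the current UTC date (so the cache is rebuilt at most once every 24 hours), a hash of conda-env/ci.yml, and a manually bumped CACHE_NUMBER. A rough Python sketch of how that composite key behaves, purely illustrative and using hypothetical stand-in values for the GitHub Actions context:

import hashlib
from datetime import datetime, timezone

# Hypothetical stand-ins for the Actions context values used in the key.
runner_os = "Linux"          # ${{ runner.os }}
runner_arch = "X64"          # ${{ runner.arch }}
python_version = "3.10"      # ${{ matrix.python-version }}
cache_number = 0             # ${{ env.CACHE_NUMBER }}; bump to force a fresh cache

# Changes once per day, so a new cache is built at most every 24 hours.
today = datetime.now(timezone.utc).strftime("%Y%m%d")

# hashFiles() hashes the file contents; sha256 is used here as a stand-in.
with open("conda-env/ci.yml", "rb") as f:
    env_hash = hashlib.sha256(f.read()).hexdigest()

key = f"conda-{runner_os}-{runner_arch}-{python_version}-{today}-{env_hash}-{cache_number}"
print(key)

Any change to ci.yml, the date, or CACHE_NUMBER produces a new key, which is why the "Update environment" step only runs when steps.cache.outputs.cache-hit is not 'true'.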
4 changes: 3 additions & 1 deletion conda-env/ci.yml
@@ -19,7 +19,9 @@ dependencies:
- pandas
- python-dateutil
- xarray
- xesmf
# Constrained because 0.6.3 breaks with import ESMF
# Source: https://github.com/pangeo-data/xESMF/issues/212
- xesmf >0.6.3
# Quality Assurance
# ==================
- types-python-dateutil
2 changes: 1 addition & 1 deletion conda-env/dev.yml
@@ -18,7 +18,7 @@ dependencies:
- numpy=1.23.5
- pandas=1.5.3
- python-dateutil=2.8.2
- xarray=2023.3.0
- xarray=2023.4.2
# ==================
# Optional
# ==================
2 changes: 1 addition & 1 deletion conda-env/readthedocs.yml
@@ -17,7 +17,7 @@ dependencies:
- numpy=1.23.5
- pandas=1.5.3
- python-dateutil=2.8.2
- xarray=2023.3.0
- xarray=2023.4.2
# ==================
# Optional
# ==================
91 changes: 59 additions & 32 deletions tests/test_bounds.py
@@ -72,62 +72,89 @@ def setup(self):

def test_adds_bounds_to_the_dataset(self):
ds = self.ds_with_bnds.copy()

ds = ds.drop_vars(["lat_bnds", "lon_bnds"])

# Compare the result against the expected.
result = ds.bounds.add_missing_bounds(axes=["X", "Y"])
assert result.identical(self.ds_with_bnds)

def test_skips_adding_bounds_for_coords_that_are_1_dim_singleton(self):
# Length <=1
lon = xr.DataArray(
data=np.array([0]),
dims=["lon"],
attrs={"units": "degrees_east", "axis": "X"},
def test_skips_adding_bounds_for_coords_that_are_1_dim_singleton(self, caplog):
# NOTE: Suppress logger warning to avoid polluting test suite.
caplog.set_level(logging.CRITICAL)

# Create the input dataset.
ds = xr.Dataset(
coords={
"lon": xr.DataArray(
data=np.array([0]),
dims=["lon"],
attrs={"units": "degrees_east", "axis": "X"},
)
}
)
ds = xr.Dataset(coords={"lon": lon})

# Compare the result against the expected.
result = ds.bounds.add_missing_bounds(axes=["X"])

assert result.identical(ds)

def test_skips_adding_bounds_for_coords_that_are_0_dim_singleton(self):
# 0-dimensional array
lon = xr.DataArray(
data=float(0),
attrs={"units": "degrees_east", "axis": "X"},
def test_skips_adding_bounds_for_coords_that_are_0_dim_singleton(self, caplog):
# NOTE: Suppress logger warning to avoid polluting test suite.
caplog.set_level(logging.CRITICAL)

# Create the input dataset.
ds = xr.Dataset(
coords={
"lon": xr.DataArray(
data=float(0),
attrs={"units": "degrees_east", "axis": "X"},
)
}
)
ds = xr.Dataset(coords={"lon": lon})

# Compare the result against the expected.
result = ds.bounds.add_missing_bounds(axes=["X"])

assert result.identical(ds)

def test_skips_adding_time_bounds_for_coords_that_are_1_dim_singleton(self):
# Length <=1
time = xr.DataArray(
data=np.array(["2000-01-01T12:00:00.000000000"], dtype="datetime64[ns]"),
dims=["time"],
attrs={"calendar": "standard", "units": "days since 1850-01-01"},
def test_skips_adding_time_bounds_for_coords_that_are_1_dim_singleton(self, caplog):
# NOTE: Suppress logger warning to avoid polluting test suite.
caplog.set_level(logging.CRITICAL)

# Create the input dataset.
ds = xr.Dataset(
coords={
"time": xr.DataArray(
data=np.array(
["2000-01-01T12:00:00.000000000"], dtype="datetime64[ns]"
),
dims=["time"],
attrs={"calendar": "standard", "units": "days since 1850-01-01"},
)
}
)
ds = xr.Dataset(coords={"time": time})

# Compare the result against the expected.
result = ds.bounds.add_missing_bounds(axes=["T"])

assert result.identical(ds)

def test_skips_adding_time_bounds_for_coords_that_are_not_datetime_like_objects(
self,
self, caplog
):
time = xr.DataArray(
data=np.array([0, 1, 2]),
dims=["time"],
attrs={"calendar": "standard", "units": "days since 1850-01-01"},
# NOTE: Suppress logger warning to avoid polluting test suite.
caplog.set_level(logging.CRITICAL)

# Create the input dataset.
ds = xr.Dataset(
coords={
"time": xr.DataArray(
data=np.array([0, 1, 2]),
dims=["time"],
attrs={"calendar": "standard", "units": "days since 1850-01-01"},
)
}
)
ds = xr.Dataset(coords={"time": time})

# Compare the result against the expected.
result = ds.bounds.add_missing_bounds(axes=["T"])

assert result.identical(ds)


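The caplog changes above use pytest's built-in log-capturing fixture to raise the logging threshold to CRITICAL, so the warning emitted when bounds generation is skipped does not clutter the test output. A minimal, self-contained sketch of the same pattern, using a hypothetical module logger rather than xcdat's internals:

import logging

logger = logging.getLogger("my_module")  # hypothetical logger, not xcdat's


def operation_that_warns():
    # Stand-in for code that logs a warning when it skips work.
    logger.warning("Skipping bounds generation for singleton coordinate.")


def test_warning_is_suppressed(caplog):
    # Raise the capture threshold so WARNING records are neither shown nor captured.
    caplog.set_level(logging.CRITICAL)

    operation_that_warns()

    assert caplog.records == []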
@@ -332,7 +359,7 @@ def test_raises_error_if_lat_coord_var_units_is_not_in_degrees(self):
def test_adds_bounds_and_sets_units_to_degrees_north_if_lat_coord_var_is_missing_units_attr(
self, caplog
):
# Suppress the warning
# NOTE: Suppress logger warning to avoid polluting test suite.
caplog.set_level(logging.CRITICAL)

ds = self.ds.copy()