Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into replace_icp_python
Browse files Browse the repository at this point in the history
  • Loading branch information
rhugonnet committed Sep 5, 2024
2 parents eb2e92f + a4e7e00 commit 998e4ed
Show file tree
Hide file tree
Showing 10 changed files with 154 additions and 71 deletions.
6 changes: 3 additions & 3 deletions dev-environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ dependencies:
- matplotlib=3.*
- pyproj>=3.4,<4
- rasterio>=1.3,<2
- scipy>=1.0,<1.13
- scipy=1.*
- tqdm
- scikit-image=0.*
- scikit-gstat>=1.0,<1.1
- geoutils=0.1.8
- scikit-gstat>=1.0.18,<1.1
- geoutils=0.1.9

# Development-specific, to mirror manually in setup.cfg [options.extras_require].
- pip
Expand Down
6 changes: 3 additions & 3 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ dependencies:
- matplotlib=3.*
- pyproj>=3.4,<4
- rasterio>=1.3,<2
- scipy>=1.0,<1.13
- scipy=1.*
- tqdm
- scikit-image=0.*
- scikit-gstat>=1.0,<1.1
- geoutils=0.1.8
- scikit-gstat>=1.0.18,<1.1
- geoutils=0.1.9
- pip

# To run CI against latest GeoUtils
Expand Down
6 changes: 3 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ numpy==1.*
matplotlib==3.*
pyproj>=3.4,<4
rasterio>=1.3,<2
scipy>=1.0,<1.13
scipy==1.*
tqdm
scikit-image==0.*
scikit-gstat>=1.0,<1.1
geoutils==0.1.8
scikit-gstat>=1.0.18,<1.1
geoutils==0.1.9
pip
4 changes: 2 additions & 2 deletions tests/test_coreg/test_affine.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ def test_coreg_example(self, verbose: bool = False) -> None:
inlier_mask = ~self.outlines.create_mask(ref)

# Run co-registration
nuth_kaab = xdem.coreg.NuthKaab()
nuth_kaab = xdem.coreg.NuthKaab(offset_threshold=0.005)
nuth_kaab.fit(ref, tba, inlier_mask=inlier_mask, verbose=verbose, random_state=42)

# Check the output .metadata is always the same
Expand All @@ -258,7 +258,7 @@ def test_coreg_example(self, verbose: bool = False) -> None:
nuth_kaab.meta["outputs"]["affine"]["shift_y"],
nuth_kaab.meta["outputs"]["affine"]["shift_z"],
)
assert shifts == pytest.approx((-9.200801, -2.785496, -1.9818556))
assert shifts == pytest.approx((-9.198341, -2.786257, -1.981793))

def test_gradientdescending(self, subsample: int = 10000, verbose: bool = False) -> None:
"""
Expand Down
68 changes: 59 additions & 9 deletions tests/test_coreg/test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import inspect
import re
import warnings
from typing import Any, Callable
from typing import Any, Callable, Iterable, Mapping

import geopandas as gpd
import geoutils as gu
Expand All @@ -21,7 +21,7 @@
import xdem
from xdem import coreg, examples, misc, spatialstats
from xdem._typing import NDArrayf
from xdem.coreg.base import Coreg, apply_matrix
from xdem.coreg.base import Coreg, apply_matrix, dict_key_to_str


def load_examples() -> tuple[RasterType, RasterType, Vector]:
Expand Down Expand Up @@ -90,6 +90,54 @@ def test_init(self) -> None:
assert c._is_affine is None
assert c._needs_vars is False

def test_info(self) -> None:
"""
Test all coreg keys required for info() exist by mapping all sub-keys in CoregDict and comparing to
coreg.base.dict_key_to_str.
Check the info() string return contains the right text for a given key.
"""

# This recursive function will find all sub-keys that are not TypedDict within a TypedDict
def recursive_typeddict_items(typed_dict: Mapping[str, Any]) -> Iterable[str]:
for key, value in typed_dict.__annotations__.items():
try:
sub_typed_dict = getattr(coreg.base, value.__forward_arg__)
if type(sub_typed_dict) is type(typed_dict):
yield from recursive_typeddict_items(sub_typed_dict)
except AttributeError:
yield key

# All subkeys
list_coregdict_keys = list(recursive_typeddict_items(coreg.base.CoregDict)) # type: ignore

# Assert all keys exist in the mapping key to str dictionary used for info
list_info_keys = list(dict_key_to_str.keys())

# TODO: Remove GradientDescending + ICP keys here once generic optimizer is used
# Temporary exceptions: pipeline/blockwise + gradientdescending/icp
list_exceptions = [
"step_meta",
"pipeline",
"x0",
"bounds",
"deltainit",
"deltatol",
"feps",
"rejection_scale",
"num_levels",
]

# Compare the two lists
list_missing_keys = [k for k in list_coregdict_keys if (k not in list_info_keys and k not in list_exceptions)]
if len(list_missing_keys) > 0:
raise AssertionError(
f"Missing keys in coreg.base.dict_key_to_str " f"for Coreg.info(): {', '.join(list_missing_keys)}"
)

# Check that info() contains the mapped string for an example
c = coreg.Coreg(meta={"subsample": 10000})
assert dict_key_to_str["subsample"] in c.info(verbose=False)

@pytest.mark.parametrize("coreg_class", [coreg.VerticalShift, coreg.ICP, coreg.NuthKaab]) # type: ignore
def test_copy(self, coreg_class: Callable[[], Coreg]) -> None:
"""Test that copying work expectedly (that no attributes still share references)."""
Expand Down Expand Up @@ -628,13 +676,15 @@ def test_pipeline(self) -> None:
# Assert that the combined vertical shift is 2
assert pipeline2.to_matrix()[2, 3] == 2.0

# TODO: Figure out why DirectionalBias + DirectionalBias pipeline fails with Scipy error
# on bounds constraints on Mac only?
all_coregs = [
coreg.VerticalShift,
coreg.NuthKaab,
coreg.ICP,
coreg.Deramp,
coreg.TerrainBias,
coreg.DirectionalBias,
# coreg.DirectionalBias,
]

@pytest.mark.parametrize("coreg1", all_coregs) # type: ignore
Expand Down Expand Up @@ -785,12 +835,12 @@ def test_pipeline_consistency(self) -> None:
aligned_dem, _ = many_nks.apply(self.tba.data, transform=self.ref.transform, crs=self.ref.crs)

# The last steps should have shifts of NEARLY zero
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_z"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_x"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_y"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_z"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_x"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_y"] == pytest.approx(0, abs=0.02)
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_z"] == pytest.approx(0, abs=0.05)
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_x"] == pytest.approx(0, abs=0.05)
assert many_nks.pipeline[1].meta["outputs"]["affine"]["shift_y"] == pytest.approx(0, abs=0.05)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_z"] == pytest.approx(0, abs=0.05)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_x"] == pytest.approx(0, abs=0.05)
assert many_nks.pipeline[2].meta["outputs"]["affine"]["shift_y"] == pytest.approx(0, abs=0.05)

# Test 2: Reflectivity
# Those two pipelines should give almost the same result
Expand Down
10 changes: 5 additions & 5 deletions tests/test_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ class TestExamples:
ddem,
np.array(
[
1.3690491,
-1.6708069,
0.12875366,
-10.096863,
2.486084,
1.3699341,
-1.6713867,
0.12953186,
-10.096802,
2.486206,
],
dtype=np.float32,
),
Expand Down
47 changes: 39 additions & 8 deletions xdem/coreg/affine.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,7 +412,7 @@ def sub_dh_interpolator(shift_x: float, shift_y: float) -> NDArrayf:
if ref == "point":
return diff_rst_pts
else:
return diff_rst_pts
return -diff_rst_pts

# Interpolate arrays of bias variables to the subsample point coordinates
if aux_vars is not None:
Expand Down Expand Up @@ -498,7 +498,11 @@ def _nuth_kaab_fit_func(xx: NDArrayf, *params: tuple[float, float, float]) -> ND
where y = dh/tan(slope) and x = aspect.
:param xx: The aspect in radians.
    :param params: Parameters a, b and c of above function.
:returns: Estimated y-values with the same shape as the given x-values
"""
Expand Down Expand Up @@ -563,6 +567,11 @@ def _nuth_kaab_aux_vars(
) -> tuple[NDArrayf, NDArrayf]:
"""
Deriving slope tangent and aspect auxiliary variables expected by the Nuth and Kääb (2011) algorithm.
    :return: Slope tangent and aspect (radians).
"""

def _calculate_slope_and_aspect_nuthkaab(dem: NDArrayf) -> tuple[NDArrayf, NDArrayf]:
Expand Down Expand Up @@ -633,6 +642,17 @@ def _nuth_kaab_iteration_step(
Iteration step of Nuth and Kääb (2011), passed to the iterate_method function.
Returns newly incremented coordinate offsets, and new statistic to compare to tolerance to reach.
    :param coords_offsets: Coordinate offsets at this iteration (easting, northing, vertical) in georeferenced unit.
    :param dh_interpolator: Interpolator returning elevation differences at the subsampled points for a certain
        horizontal offset (see _preprocess_pts_rst_subsample_interpolator).
    :param slope_tan: Array of slope tangent.
    :param aspect: Array of aspect.
    :param res: Resolution of DEM.
    :param verbose: Whether to print statements.
"""

# Calculate the elevation difference with offsets
Expand Down Expand Up @@ -701,7 +721,7 @@ def nuth_kaab(
# Check that DEM CRS is projected, otherwise slope is not correctly calculated
if not crs.is_projected:
raise NotImplementedError(
f"NuthKaab coregistration only works with in a projected CRS, current CRS is {crs}. Reproject "
f"NuthKaab coregistration only works with a projected CRS, current CRS is {crs}. Reproject "
f"your DEMs with DEM.reproject() in a local projected CRS such as UTM, that you can find "
f"using DEM.get_metric_crs()."
)
Expand Down Expand Up @@ -761,13 +781,14 @@ def _gradient_descending_fit_func(
"""
Fitting function of gradient descending method, returns the NMAD of elevation residuals.
:param coords_offsets: Coordinate offsets at this iteration (easting, northing) in georeferenced unit.
:param dh_interpolator: Interpolator returning elevation differences at the subsampled points for a certain
horizontal offset (see _preprocess_pts_rst_subsample_interpolator).
:returns: NMAD of residuals.
"""

# Calculate the elevation difference
dh = dh_interpolator(coords_offsets[0], coords_offsets[1])
vshift = -np.nanmedian(dh)
dh += vshift

# Return NMAD of residuals
return float(nmad(dh))
Expand All @@ -779,6 +800,17 @@ def _gradient_descending_fit(
params_noisyopt: InSpecificDict,
verbose: bool = False,
) -> tuple[float, float, float]:
"""
Optimize the statistical dispersion of the elevation differences residuals.
:param dh_interpolator: Interpolator returning elevation differences at the subsampled points for a certain
horizontal offset (see _preprocess_pts_rst_subsample_interpolator).
:param res: Resolution of DEM.
:param params_noisyopt: Parameters for noisyopt minimization.
:param verbose: Whether to print statements.
    :return: Optimized offsets (easting, northing, vertical) in georeferenced unit.
"""
# Define cost function
def func_cost(offset: tuple[float, float]) -> float:
return _gradient_descending_fit_func(offset, dh_interpolator=dh_interpolator)
Expand Down Expand Up @@ -825,7 +857,6 @@ def gradient_descending(
including subsampling and interpolation to the same points.
:return: Final estimated offset: east, north, vertical (in georeferenced units).
"""
if not _has_noisyopt:
raise ValueError("Optional dependency needed. Install 'noisyopt'")
Expand Down Expand Up @@ -1408,10 +1439,10 @@ class NuthKaab(AffineCoreg):
def __init__(
self,
max_iterations: int = 10,
offset_threshold: float = 0.05,
offset_threshold: float = 0.001,
bin_before_fit: bool = True,
fit_optimizer: Callable[..., tuple[NDArrayf, Any]] = scipy.optimize.curve_fit,
bin_sizes: int | dict[str, int | Iterable[float]] = 80,
bin_sizes: int | dict[str, int | Iterable[float]] = 72,
bin_statistic: Callable[[NDArrayf], np.floating[Any]] = np.nanmedian,
subsample: int | float = 5e5,
) -> None:
Expand All @@ -1437,7 +1468,7 @@ def __init__(
meta_input_iterative = {"max_iterations": max_iterations, "tolerance": offset_threshold}

# Define parameters exactly as in BiasCorr, but with only "fit" or "bin_and_fit" as option, so a bin_before_fit
# boolean, no bin apply option, and fit_func is preferefind
# boolean, no bin apply option, and fit_func is predefined
if not bin_before_fit:
meta_fit = {"fit_or_bin": "fit", "fit_func": _nuth_kaab_fit_func, "fit_optimizer": fit_optimizer}
meta_fit.update(meta_input_iterative)
Expand Down
Loading

0 comments on commit 998e4ed

Please sign in to comment.