From 767c4405615a72932c8536eebf5808087c44ca46 Mon Sep 17 00:00:00 2001 From: Romain Hugonnet Date: Fri, 6 Sep 2024 16:36:31 -0800 Subject: [PATCH] Incremental commit on doc --- doc/source/api.md | 11 +- doc/source/biascorr.md | 27 +++-- doc/source/cheatsheet.md | 71 +++++++++++++ doc/source/conf.py | 2 +- doc/source/coregistration.md | 153 +++++++++++++++++++++------- doc/source/dem_class.md | 3 + doc/source/elevation_point_cloud.md | 2 +- doc/source/guides.md | 1 + doc/source/terrain.md | 4 +- doc/source/uncertainty.md | 8 +- tests/test_coreg/test_affine.py | 4 +- xdem/coreg/affine.py | 120 +++++++++++----------- xdem/coreg/base.py | 81 ++++++++++++++- xdem/terrain.py | 6 +- 14 files changed, 362 insertions(+), 131 deletions(-) diff --git a/doc/source/api.md b/doc/source/api.md index 4acb3d94..05e79a0a 100644 --- a/doc/source/api.md +++ b/doc/source/api.md @@ -231,17 +231,19 @@ To build and pass your coregistration pipeline to {func}`~xdem.DEM.coregister_3d .. autosummary:: :toctree: gen_modules/ + xdem.coreg.Coreg.fit_and_apply xdem.coreg.Coreg.fit xdem.coreg.Coreg.apply ``` -#### Other functionalities +#### Extracting metadata ```{eval-rst} .. autosummary:: :toctree: gen_modules/ - xdem.coreg.Coreg.residuals + xdem.coreg.Coreg.info + xdem.coreg.Coreg.meta ``` ### Affine coregistration @@ -274,6 +276,11 @@ To build and pass your coregistration pipeline to {func}`~xdem.DEM.coregister_3d xdem.coreg.AffineCoreg.from_matrix xdem.coreg.AffineCoreg.to_matrix + xdem.coreg.AffineCoreg.from_translations + xdem.coreg.AffineCoreg.to_translations + xdem.coreg.AffineCoreg.from_rotations + xdem.coreg.AffineCoreg.to_rotations + xdem.coreg.apply_matrix xdem.coreg.invert_matrix ``` diff --git a/doc/source/biascorr.md b/doc/source/biascorr.md index c2aa3a49..49800072 100644 --- a/doc/source/biascorr.md +++ b/doc/source/biascorr.md @@ -76,8 +76,7 @@ Once defined, they can be applied the same two ways as for coregistration (using # Coregister with bias correction by calling the DEM method corrected_dem = tba_dem.coregister_3d(ref_dem, biascorr) # (Equivalent) Or by calling the fit and apply steps -biascorr.fit(ref_dem, tba_dem) -corrected_dem = biascorr.apply(tba_dem) +corrected_dem = biascorr.fit_and_apply(ref_dem, tba_dem) ``` Additionally, **bias corrections can be customized to use any number of variables to correct simultaneously**, @@ -188,8 +187,7 @@ tbc_dem_ramp = ref_dem + synthetic_bias # Instantiate a 2nd order 2D deramping deramp = xdem.coreg.Deramp(poly_order=2) # Fit and apply -deramp.fit(ref_dem, tbc_dem_ramp) -corrected_dem = deramp.apply(tbc_dem_ramp) +corrected_dem = deramp.fit_and_apply(ref_dem, tbc_dem_ramp) ``` ```{code-cell} ipython3 @@ -243,8 +241,7 @@ tbc_dem_sumsin = ref_dem + synthetic_bias # Define a directional bias correction at a certain angle (degrees), defaults to "bin_and_fit" for a sum of sinusoids dirbias = xdem.coreg.DirectionalBias(angle=20) # Fit and apply -dirbias.fit(ref_dem, tbc_dem_sumsin, random_state=42) -corrected_dem = dirbias.apply(tbc_dem_sumsin) +corrected_dem = dirbias.fit_and_apply(ref_dem, tbc_dem_sumsin, random_state=42) ``` ```{code-cell} ipython3 @@ -290,8 +287,7 @@ tbc_dem_strip = ref_dem + synthetic_bias # Define a directional bias correction at a certain angle (degrees), for a binning of 1000 bins dirbias = xdem.coreg.DirectionalBias(angle=60, fit_or_bin="bin", bin_sizes=1000) # Fit and apply -dirbias.fit(ref_dem, tbc_dem_strip) -corrected_dem = dirbias.apply(tbc_dem_strip) +corrected_dem = dirbias.fit_and_apply(ref_dem, tbc_dem_strip) ``` 
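+
+Before plotting, the fitted metadata can be inspected (a minimal sketch; the nested `"fitorbin"` and
+`"bin_dataframe"` key names are assumptions following the metadata categories described in {ref}`coregistration`,
+and can be verified against the {func}`~xdem.coreg.Coreg.info` printout):
+
+```{code-cell} ipython3
+# Summarize the coregistration inputs and outputs
+dirbias.info()
+# The binned statistics are stored in the output metadata (key names assumed)
+dirbias.meta["outputs"]["fitorbin"]["bin_dataframe"].head()
+```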
 ```{code-cell} ipython3
@@ -346,10 +342,9 @@ tbc_dem_curv = ref_dem + synthetic_bias
 terbias = xdem.coreg.TerrainBias(terrain_attribute="maximum_curvature",
                                  bin_sizes={"maximum_curvature": np.linspace(-5, 5, 1000)},
                                  bin_apply_method="per_bin")
-# Fit and apply to the data
-terbias.fit(ref_dem, tbc_dem_curv)
+
 # We have to pass the original curvature here
-corrected_dem = terbias.apply(tbc_dem_curv, bias_vars={"maximum_curvature": maxc})
+corrected_dem = terbias.fit_and_apply(ref_dem, tbc_dem_curv, bias_vars={"maximum_curvature": maxc})
 ```
 
 ```{code-cell} ipython3
@@ -408,9 +403,13 @@ biascorr = xdem.coreg.BiasCorr(bias_var_names=["aspect", "slope", "elevation"],
 # Derive curvature and slope
 aspect, slope = ref_dem.get_terrain_attribute(["aspect", "slope"])
 
-# Pass the variables to the fit function, matching the names declared above, and same for apply
-biascorr.fit(ref_dem, tba_dem_nk, inlier_mask=inlier_mask, bias_vars={"aspect": aspect, "slope": slope, "elevation": ref_dem})
-corrected_dem = biascorr.apply(tba_dem_nk, bias_vars={"aspect": aspect, "slope": slope, "elevation": ref_dem})
+# Pass the variables to fit_and_apply, matching the names declared above
+corrected_dem = biascorr.fit_and_apply(
+    ref_dem,
+    tba_dem_nk,
+    inlier_mask=inlier_mask,
+    bias_vars={"aspect": aspect, "slope": slope, "elevation": ref_dem}
+)
 ```
 
 ```{warning}
diff --git a/doc/source/cheatsheet.md b/doc/source/cheatsheet.md
index 39dcd3f9..a1f176fd 100644
--- a/doc/source/cheatsheet.md
+++ b/doc/source/cheatsheet.md
@@ -1,3 +1,74 @@
 (cheatsheet)=
 
 # Cheatsheet: How to correct... ?
+
+In elevation data analysis, the work generally starts with identifying which correction method to apply to the
+specific pattern of error observed in your own data.
+
+The cheatsheet below links each pattern of error that you can visually identify on **static surfaces of a map of
+elevation differences with another elevation dataset** to the method most likely to correct it!
+
+## Cheatsheet
+
+The error pattern categories listed in this cheatsheet **are linked to visual examples further below**, so that
+you can use them as a reference to compare to your own elevation differences.
+
+```{list-table}
+   :widths: 1 2 2 2
+   :header-rows: 1
+   :stub-columns: 1
+
+   * - Pattern
+     - Description
+     - Cause and correction
+     - Notes
+   * - {ref}`sharp-landforms`
+     - Positive and negative errors that are larger near high slopes, making landforms appear visually.
+     - Likely a horizontal shift due to geopositioning errors, use a {ref}`coregistration` such as {class}`~xdem.coreg.NuthKaab`.
+     - Even a tiny horizontal misalignment (1/10th of a pixel) can be visually identified!
+   * - {ref}`smooth-large-field`
+     - Smooth offsets varying at scales of 10 km or more, often of the same sign (either positive or negative).
+     - Likely a wrong {ref}`vertical-ref`, which can be set and transformed with {func}`~xdem.DEM.set_vcrs` and {func}`~xdem.DEM.to_vcrs`.
+     - Vertical references often only exist in a user guide; they are not encoded in the raster CRS and need to be set manually.
+   * - {ref}`ramp-or-dome`
+     - Ramping errors, often near the edge of the data extent, sometimes with a center dome.
+     - Likely ramps or rotations due to camera errors, use either a {ref}`coregistration` such as {class}`~xdem.coreg.ICP` or a {ref}`bias-correction` such as {class}`~xdem.coreg.Deramp`.
+     - Can sometimes be more rigorously fixed ahead of DEM generation with bundle adjustment.
+   * - {ref}`undulations`
+     - Positive and negative errors in undulating patterns, at one or several wavelengths well larger than the pixel size.
+     - Likely jitter-type errors, use a {ref}`bias-correction` such as {class}`~xdem.coreg.DirectionalBias`.
+     - Can sometimes be more rigorously fixed ahead of DEM generation with jitter correction.
+   * - {ref}`point-oscillation`
+     - Point data errors that oscillate between negative and positive.
+     - Likely a wrong point-raster comparison, use [point interpolation or reduction on the raster instead](https://geoutils.readthedocs.io/en/stable/raster_vector_point.html#rasterpoint-operations).
+     - Rasterizing point data introduces spatially correlated random errors; instead, it is recommended to interpolate the raster at the point coordinates.
+```
+
+## Visual patterns of errors
+
+(sharp-landforms)=
+### Sharp landforms
+
+```{code-cell} ipython3
+# Open an example DEM and simulate a horizontal translation
+import xdem
+
+dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
+x_shift = 5
+y_shift = 5
+dem_shift = dem.translate(x_shift, y_shift)
+
+# Resample on the original grid and plot the elevation differences
+dh = dem - dem_shift.reproject(dem)
+dh.plot(cmap='RdYlBu', vmin=-5, vmax=5, cbar_title="Elevation differences (m)")
+```
+
+(smooth-large-field)=
+### Smooth large-scale offset field
+
+(ramp-or-dome)=
+### Ramp or dome
+
+(undulations)=
+### Undulations
+
+(point-oscillation)=
+### Point oscillation
+
diff --git a/doc/source/conf.py b/doc/source/conf.py
index bd9ed07c..95a2a6cf 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -64,6 +64,7 @@
 nb_kernel_rgx_aliases = {".*xdem.*": "python3"}
 nb_execution_raise_on_error = True  # To fail documentation build on notebook execution error
 nb_execution_show_tb = True  # To show full traceback on notebook execution error
+nb_output_stderr = "warn"  # To warn if output is written to stderr in a notebook cell (if intended, override to "show" in the cell)
 
 # autosummary_generate = True
@@ -173,7 +174,6 @@ def setup(app):
     "announcement": (
         "⚠️ Our 0.1 release refactored several early-development functions for long-term stability, "
         'to update your code see here. ⚠️'
-        "<br>Future changes will come with deprecation warnings! 🙂"
     ),
 }
 
diff --git a/doc/source/coregistration.md b/doc/source/coregistration.md
index 36d9acd3..27f08c3b 100644
--- a/doc/source/coregistration.md
+++ b/doc/source/coregistration.md
@@ -19,9 +19,10 @@ kernelspec:
 
 xDEM implements a wide range of **coregistration algorithms and pipelines for 3-dimensional alignment**
 from the peer-reviewed literature often tailored specifically to elevation data, aiming at correcting systematic elevation errors.
 
-Two categories of alignment are generally differentiated: **3D affine transformations** described below, and other
-alignments that possibly rely on external variables described in {ref}`biascorr`.
+Two categories of alignment are generally differentiated: 3D [affine transformations](https://en.wikipedia.org/wiki/Affine_transformation)
+described below, and other alignments that possibly rely on external variables, described in {ref}`biascorr`.
+
+Affine transformations can include vertical and horizontal translations, rotations and reflections, and scalings.
 
 :::{admonition} More reading
 :class: tip
@@ -73,48 +74,41 @@ tba_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
 aligned_dem = tba_dem.coregister_3d(ref_dem, my_coreg_pipeline)
 ```
 
-Alternatively, the coregistration can be applied by sequentially calling the {func}`~xdem.coreg.Coreg.fit` and {func}`~xdem.coreg.Coreg.apply` steps,
-which allows a broader variety of arguments at each step, and re-using the same transformation to several objects (e.g., horizontal shift of both a stereo DEM and its ortho-image).
+Alternatively, the coregistration can be applied by calling {func}`~xdem.coreg.Coreg.fit_and_apply`, or by sequentially
+calling the {func}`~xdem.coreg.Coreg.fit` and {func}`~xdem.coreg.Coreg.apply` steps,
+which allows a broader variety of arguments at each step, and re-use of the same transformation for several objects
+(e.g., horizontal shift of both a stereo DEM and its ortho-image).
 
 ```{code-cell} ipython3
-# (Equivalent) Or, use fit and apply in two calls
-my_coreg_pipeline.fit(ref_dem, tba_dem)
-aligned_dem = my_coreg_pipeline.apply(tba_dem)
+# (Equivalent) Or use fit and apply in a single call
+aligned_dem = my_coreg_pipeline.fit_and_apply(ref_dem, tba_dem)
 ```
 
+Information about the coregistration inputs and outputs is summarized by {func}`~xdem.coreg.Coreg.info`.
+
 ```{tip}
 Often, an `inlier_mask` has to be passed to {func}`~xdem.coreg.Coreg.fit` to isolate static surfaces to utilize during coregistration (for instance removing vegetation, snow, glaciers). This mask can be easily derived using {func}`~geoutils.Vector.create_mask`.
 ```
 
-## What is coregistration?
-
-Coregistration is the process of finding a transformation to align data in a certain number of dimensions. In the case
-of elevation data, in three dimensions.
-
-Transformations that can be described by a 3-dimensional [affine](https://en.wikipedia.org/wiki/Affine_transformation)
-function are included in coregistration methods, which include:
-
-- vertical and horizontal translations,
-- rotations, reflections,
-- scalings.
-
 ## Using a coregistration
 
 (coreg_object)=
 ### The {class}`~xdem.coreg.Coreg` object
 
 Each coregistration method implemented in xDEM inherits its interface from the {class}`~xdem.coreg.Coreg` class<sup>1</sup>, and has the following methods:
 
-- {func}`~xdem.coreg.Coreg.fit` for estimating the transform.
-- {func}`~xdem.coreg.Coreg.apply` for applying the transform to a DEM.
-- {func}`~xdem.coreg.AffineCoreg.to_matrix` to convert the transform to a 4x4 transformation matrix, if possible.
+- {func}`~xdem.coreg.Coreg.fit_and_apply` for estimating the transformation and applying it in one step,
+- {func}`~xdem.coreg.Coreg.info` for displaying a summary of the metadata, including inputs and outputs of the coregistration.
+
+These two methods cover most uses. More specific methods are also available:
+
+- {func}`~xdem.coreg.Coreg.fit` for estimating the transformation without applying it,
+- {func}`~xdem.coreg.Coreg.apply` for applying an estimated transformation,
+- {func}`~xdem.coreg.AffineCoreg.to_matrix` to convert the transform to a 4x4 transformation matrix, if possible,
 - {func}`~xdem.coreg.AffineCoreg.from_matrix` to create a coregistration from a 4x4 transformation matrix.
 
 ```{margin}
 <sup>1</sup>In a style inspired by [scikit-learn's pipelines](https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LinearRegression.html#sklearn-linear-model-linearregression).
 ```
 
-{func}`~xdem.coreg.Coreg.fit` is called to estimate the transform, and then this transform can be used or exported using the subsequent methods.
-
 **Inheritance diagram of implemented coregistrations:**
 
 ```{eval-rst}
@@ -126,7 +120,66 @@
 
 See {ref}`biascorr` for more information on non-rigid transformations ("bias corrections").
 
 ### Accessing coregistration metadata
 
+The metadata surrounding a coregistration method, which can be displayed by {func}`~xdem.coreg.Coreg.info`, is stored in
+the {attr}`~xdem.coreg.Coreg.meta` nested dictionary.
+This metadata is divided into **inputs** and **outputs**. Input metadata corresponds to the arguments
+used when initializing a {class}`~xdem.coreg.Coreg` object, while output metadata are created during the call to
+{func}`~xdem.coreg.Coreg.fit`. Together, they allow applying the transformation during the
+{func}`~xdem.coreg.Coreg.apply` step of the coregistration.
+
+```{code-cell} ipython3
+# Example of metadata info after fitting
+my_coreg_pipeline.info()
+```
+
+For both **inputs** and **outputs**, four consistent categories of metadata are defined, plus a fifth category of
+method-specific metadata.
+
+**Note:** Some metadata, such as the parameters `fit_or_bin` and `fit_func` described below, are pre-defined for
+affine coregistration methods and cannot be modified. They only take user-specified values for {ref}`biascorr`.
+
+**1. Randomization metadata (common to all)**:
+
+- An input `subsample` to define the subsample size of valid data to use in all methods (recommended for performance),
+- An input `random_state` to define the random seed for reproducibility of the subsampling (and potentially other random aspects such as optimizers),
+- An output `subsample_final` that stores the final subsample size used, which can be smaller than requested depending on the amount of valid data intersecting the two elevation datasets.
+
+**2. Fitting and binning metadata (common to nearly all methods)**:
+
+- An input `fit_or_bin` to either fit a parametric model by passing **"fit"**, perform an empirical binning by passing **"bin"**, or fit a parametric model to the binning with **"bin_and_fit"** (only **"fit"** or **"bin_and_fit"** are possible for affine methods),
+- An input `fit_func` to pass any parametric function to fit to the bias **(pre-defined for affine methods)**,
+- An input `fit_optimizer` to pass any optimizer function to perform the fit minimization,
+- An input `bin_sizes` to pass the size or edges of the bins for each variable,
+- An input `bin_statistic` to pass the statistic to compute in each bin,
+- An input `bin_apply_method` to pass the method with which to apply the binning for correction,
+- An output `fit_params` that stores the optimized parameters for `fit_func`,
+- An output `fit_perr` that stores the error of optimized parameters (only for the default `fit_optimizer`),
+- An output `bin_dataframe` that stores the dataframe of binned statistics.
+
+**3. Iteration metadata (common to all iterative methods)**:
+
+- An input `max_iterations` to define the maximum number of iterations at which to stop the method,
+- An input `tolerance` to define the tolerance at which to stop iterations (tolerance unit defined in the method description),
+- An output `last_iteration` that stores the last iteration of the method,
+- An output `all_tolerances` that stores the tolerances computed at each iteration.
+
+**4. Affine metadata (common to all affine methods)**:
+
+- An output `matrix` that stores the estimated affine matrix,
+- An output `centroid` that stores the centroid coordinates with which to apply the affine transformation,
+- Outputs `shift_x`, `shift_y` and `shift_z` that store the easting, northing and vertical offsets, respectively.
+
+```{tip}
+In xDEM, you can extract the translations and rotations of an affine matrix using {func}`xdem.coreg.AffineCoreg.to_translations` and
+{func}`xdem.coreg.AffineCoreg.to_rotations`.
+
+To further manipulate affine matrices, see the [documentation of pytransform3d](https://dfki-ric.github.io/pytransform3d/rotations.html).
+```
+
+**5. Specific metadata (only for certain methods)**:
+
+These metadata are inputs specific to a given method, outlined in its method description.
+
+For instance, {class}`xdem.coreg.Deramp` has an input `poly_order` defining the polynomial order used for the fit, and
+{class}`xdem.coreg.DirectionalBias` has an input `angle` defining the angle at which to perform the directional correction.
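+
+For instance, after fitting, the offsets estimated by an affine method can be read from the output metadata, or
+converted from the affine matrix (a minimal sketch; the nested `"affine"` category key follows the metadata
+categories above, and the exact structure can be checked with {func}`~xdem.coreg.Coreg.info`):
+
+```{code-cell} ipython3
+# Fit an affine method to read its estimated offsets
+nuth_kaab = xdem.coreg.NuthKaab()
+nuth_kaab.fit(ref_dem, tba_dem)
+# From the output metadata (nested key names assumed, see info() above)...
+print(nuth_kaab.meta["outputs"]["affine"]["shift_x"])
+# ...or equivalently, converted from the affine matrix
+print(nuth_kaab.to_translations())
+```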
## Coregistration methods @@ -176,8 +229,7 @@ tba_dem_shift = xdem.coreg.apply_matrix(ref_dem, matrix) # Define a coregistration based on the Nuth and Kääb (2011) method nuth_kaab = xdem.coreg.NuthKaab() # Fit to data and apply -nuth_kaab.fit(ref_dem, tba_dem_shift) -aligned_dem = nuth_kaab.apply(tba_dem_shift) +aligned_dem = nuth_kaab.fit_and_apply(ref_dem, tba_dem_shift) ``` ```{code-cell} ipython3 @@ -221,8 +273,7 @@ tba_dem_vshift = ref_dem + 10 # Define a coregistration object based on a vertical shift correction vshift = xdem.coreg.VerticalShift(vshift_reduc_func=np.median) # Fit and apply -vshift.fit(ref_dem, tba_dem_vshift) -aligned_dem = vshift.apply(tba_dem_vshift) +aligned_dem = vshift.fit_and_apply(ref_dem, tba_dem_vshift) ``` ```{code-cell} ipython3 @@ -264,9 +315,9 @@ ICP is currently based on [OpenCV's implementation](https://docs.opencv.org/4.x/ # Apply a rotation of 0.2 degrees and X/Y/Z shifts to elevation in meters rotation = np.deg2rad(0.2) -x_shift = 20 -y_shift = 20 -z_shift = 5 +x_shift = 100 +y_shift = 200 +z_shift = 50 # Affine matrix for 3D transformation matrix = np.array( [ @@ -276,16 +327,16 @@ matrix = np.array( [0, 0, 0, 1], ] ) +centroid = [ref_dem.bounds.left + 5000, ref_dem.bounds.top - 2000, np.median(ref_dem) + 100] # We create misaligned elevation data -tba_dem_shifted_rotated = xdem.coreg.apply_matrix(ref_dem, matrix) +tba_dem_shifted_rotated = xdem.coreg.apply_matrix(ref_dem, matrix, centroid=centroid) ``` ```{code-cell} ipython3 # Define a coregistration based on ICP icp = xdem.coreg.ICP() # Fit to data and apply -icp.fit(ref_dem, tba_dem_shifted_rotated) -aligned_dem = icp.apply(tba_dem_shifted_rotated) +aligned_dem = icp.fit_and_apply(ref_dem, tba_dem_shifted_rotated) ``` ```{code-cell} ipython3 @@ -327,8 +378,7 @@ The results of a pipeline can be used in other programs by exporting the combine ```{code-cell} ipython3 # Fit to data and apply the pipeline of ICP + Nuth and Kääb -pipeline.fit(ref_dem, tba_dem_shifted_rotated) -aligned_dem = pipeline.apply(tba_dem_shifted_rotated) +aligned_dem = pipeline.fit_and_apply(ref_dem, tba_dem_shifted_rotated) ``` ```{code-cell} ipython3 @@ -372,4 +422,35 @@ pipeline = xdem.coreg.VerticalShift() + xdem.coreg.ICP() + xdem.coreg.NuthKaab() ### The {class}`~xdem.coreg.BlockwiseCoreg` object +Sometimes, we want to split a coregistration across different spatial subsets of an elevation dataset, running that +method independently in each subset. A {class}`~xdem.coreg.BlockwiseCoreg` can be constructed for this: + +```{code-cell} ipython3 +blockwise = xdem.coreg.BlockwiseCoreg(xdem.coreg.NuthKaab(), subdivision=16) +``` + +The subdivision corresponds to an equal-length block division across the extent of the elevation dataset. It needs +to be a number of the form 2{sup}`n` (such as 4 or 256). 
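+
+Any coregistration method, or pipeline of methods, can be wrapped this way (a minimal sketch, assuming pipelines
+are accepted as input in the same way as single methods):
+
+```{code-cell} ipython3
+# Wrap a pipeline of two methods, dividing the extent into 4 blocks
+blockwise_pipeline = xdem.coreg.BlockwiseCoreg(
+    xdem.coreg.VerticalShift() + xdem.coreg.NuthKaab(), subdivision=4
+)
+```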
+
+It is run the same way as other coregistrations:
+
+```{code-cell} ipython3
+# Run 16 block coregistrations
+blockwise.fit(ref_dem, tba_dem_shifted_rotated)
+aligned_dem = blockwise.apply(tba_dem_shifted_rotated)
+```
+
+```{code-cell} ipython3
+:tags: [hide-input]
+:mystnb:
+:  code_prompt_show: "Show plotting code"
+:  code_prompt_hide: "Hide plotting code"
+
+# Plot before and after
+f, ax = plt.subplots(1, 2)
+ax[0].set_title("Before blockwise\ncoregistration")
+(tba_dem_shifted_rotated - ref_dem).plot(cmap='RdYlBu', vmin=-30, vmax=30, ax=ax[0])
+ax[1].set_title("After blockwise\ncoregistration")
+(aligned_dem - ref_dem).plot(cmap='RdYlBu', vmin=-30, vmax=30, ax=ax[1], cbar_title="Elevation differences (m)")
+_ = ax[1].set_yticklabels([])
+```
\ No newline at end of file
diff --git a/doc/source/dem_class.md b/doc/source/dem_class.md
index bc28e8f7..b91b5318 100644
--- a/doc/source/dem_class.md
+++ b/doc/source/dem_class.md
@@ -89,6 +89,9 @@ import geoutils as gu
 fn_glacier_outlines = xdem.examples.get_path("longyearbyen_glacier_outlines")
 vect_gla = gu.Vector(fn_glacier_outlines)
 
+# Crop outlines to those intersecting the DEM
+vect_gla = vect_gla.crop(dem)
+
 # Plot the DEM and the vector file
 dem.plot(cmap="terrain", cbar_title="Elevation (m)")
 vect_gla.plot(dem)  # We pass the DEM as reference for the plot CRS/extent
diff --git a/doc/source/elevation_point_cloud.md b/doc/source/elevation_point_cloud.md
index bd04faaa..0485ad47 100644
--- a/doc/source/elevation_point_cloud.md
+++ b/doc/source/elevation_point_cloud.md
@@ -17,4 +17,4 @@ kernelspec:
 In construction, planned for 2024.
 
 However, **elevation point clouds are already supported for coregistration and bias correction** by passing a {class}`geopandas.GeoDataFrame`
-associated to an elevation column name argument `z_name` to {func}`xdem.coreg.Coreg.fit` or {func}`xdem.coreg.Coreg.apply`.
+associated to an elevation column name argument `z_name` to {func}`~xdem.coreg.Coreg.fit_and_apply`.
diff --git a/doc/source/guides.md b/doc/source/guides.md
index 0816a6c4..f27abf18 100644
--- a/doc/source/guides.md
+++ b/doc/source/guides.md
@@ -12,4 +12,5 @@ static_surfaces
 accuracy_precision
 robust_estimators
 spatial_stats
+reporting_metrics
 ```
diff --git a/doc/source/terrain.md b/doc/source/terrain.md
index 7df5df38..777898f0 100644
--- a/doc/source/terrain.md
+++ b/doc/source/terrain.md
@@ -93,7 +93,7 @@ If computational performance is key, xDEM can rely on [RichDEM](https://richdem.
     - [Riley et al. (1999)](http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf) or [Wilson et al. (2007)](http://dx.doi.org/10.1080/01490410701295962)
   * - {ref}`roughness`
     - Meters
-    - [Dartnell (2000)](http://dx.doi.org/10.14358/PERS.70.9.1081)
+    - [Dartnell (2000)](https://environment.sfsu.edu/node/11292)
   * - {ref}`rugosity`
     - Unitless
     - [Jenness (2004)](<https://doi.org/10.2193/0091-7648(2004)032[0829:CLSAFD]2.0.CO;2>)
@@ -342,7 +342,7 @@ tri.plot(cmap="Purples", cbar_title="Terrain ruggedness index (m)")
 {func}`xdem.DEM.roughness`
 
 The roughness is a metric of terrain ruggedness, based on the maximum difference in elevation in the surroundings,
-described in [Dartnell (2000)](http://dx.doi.org/10.14358/PERS.70.9.1081). Its unit is that of the DEM (typically meters) and it can be computed for any window size (default 3x3 pixels).
+described in [Dartnell (2000)](https://environment.sfsu.edu/node/11292). Its unit is that of the DEM (typically meters) and it can be computed for any window size (default 3x3 pixels).
 
 $$
 r_{\textrm{D}} = \textrm{max}_{ij} (h_{ij}) - \textrm{min}_{ij} (h_{ij}) .
diff --git a/doc/source/uncertainty.md b/doc/source/uncertainty.md
index 6d33336d..a1145635 100644
--- a/doc/source/uncertainty.md
+++ b/doc/source/uncertainty.md
@@ -30,7 +30,7 @@ While uncertainty analysis technically refers to both systematic and random erro
 are corrected using {ref}`coregistration` and {ref}`biascorr`, so we here refer to **uncertainty analysis for
 quantifying and propagating random errors**.
 
-In detail, xDEM provide tools to:
+In detail, xDEM provides tools to:
 
 1. Estimate and model elevation **heteroscedasticity, i.e. variable random errors** (e.g., such as with terrain slope or stereo-correlation),
 2. Estimate and model the **spatial correlation of random errors** (e.g., from native spatial resolution or instrument noise),
@@ -47,7 +47,7 @@ Additionally, we recommend reading the **{ref}`static-surfaces` guide page** on
 
 ## Quick use
 
-The estimation of the spatial structure of random errors of elevation data (heteroscedasticity) is conveniently
+The estimation of the spatial structure of random errors of elevation data is conveniently
 wrapped in a single method {func}`~xdem.DEM.estimate_uncertainty`, for which the steps are detailed below.
@@ -106,8 +106,8 @@ potential multiple ranges of spatial correlation (instead of a single one). In a
 considers potential heteroscedasticity or variable errors (instead of homoscedasticity, or constant errors), also common
 in elevation data.
 Because accounting for possible multiple correlation ranges also works if you have a single correlation range in your data,
-and accounting for potential heteroscedasticity also works on homoscedastic data, **there is nothing to lose by using
-a more advanced framework!**
+and accounting for potential heteroscedasticity also works on homoscedastic data, **there is little to lose by using
+a more advanced framework** (most often, only a bit of additional computation time)!
 
 ```{list-table}
    :widths: 1 1 1 1 1
diff --git a/tests/test_coreg/test_affine.py b/tests/test_coreg/test_affine.py
index 0baa6114..d1eb0ed2 100644
--- a/tests/test_coreg/test_affine.py
+++ b/tests/test_coreg/test_affine.py
@@ -161,13 +161,13 @@ def test_from_classmethods(self) -> None:
 
-        # Check that the from_translation function works as expected.
+        # Check that the from_translations function works as expected.
         x_offset = 5
-        coreg_obj2 = AffineCoreg.from_translation(x_off=x_offset)
+        coreg_obj2 = AffineCoreg.from_translations(x_off=x_offset)
         transformed_points2 = coreg_obj2.apply(self.points)
         assert np.array_equal(self.points.geometry.x.values + x_offset, transformed_points2.geometry.x.values)
 
         # Try to make a Coreg object from a nan translation (should fail).
try: - AffineCoreg.from_translation(np.nan) + AffineCoreg.from_translations(np.nan) except ValueError as exception: if "non-finite values" not in str(exception): raise exception diff --git a/xdem/coreg/affine.py b/xdem/coreg/affine.py index ca4e58cc..455402f6 100644 --- a/xdem/coreg/affine.py +++ b/xdem/coreg/affine.py @@ -32,11 +32,13 @@ _bin_or_and_fit_nd, _get_subsample_mask_pts_rst, _preprocess_pts_rst_subsample, + _reproject_horizontal_shift_samecrs, ) from xdem.spatialstats import nmad try: import pytransform3d.transformations + import pytransform3d.rotations _HAS_P3D = True except ImportError: @@ -53,68 +55,6 @@ # Generic functions for affine methods ###################################### - -@overload -def _reproject_horizontal_shift_samecrs( - raster_arr: NDArrayf, - src_transform: rio.transform.Affine, - dst_transform: rio.transform.Affine = None, - *, - return_interpolator: Literal[False] = False, - resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", -) -> NDArrayf: - ... - - -@overload -def _reproject_horizontal_shift_samecrs( - raster_arr: NDArrayf, - src_transform: rio.transform.Affine, - dst_transform: rio.transform.Affine = None, - *, - return_interpolator: Literal[True], - resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", -) -> Callable[[tuple[NDArrayf, NDArrayf]], NDArrayf]: - ... - - -def _reproject_horizontal_shift_samecrs( - raster_arr: NDArrayf, - src_transform: rio.transform.Affine, - dst_transform: rio.transform.Affine = None, - return_interpolator: bool = False, - resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", -) -> NDArrayf | Callable[[tuple[NDArrayf, NDArrayf]], NDArrayf]: - """ - Reproject a raster only for a horizontal shift (transform update) in the same CRS. - - This function exists independently of Raster.reproject() because Rasterio has unexplained reprojection issues - that can create non-negligible sub-pixel shifts that should be crucially avoided for coregistration. - See https://github.com/rasterio/rasterio/issues/2052#issuecomment-2078732477. - - Here we use SciPy interpolation instead, modified for nodata propagation in geoutils.interp_points(). - """ - - # We are reprojecting the raster array relative to itself without changing its pixel interpretation, so we can - # force any pixel interpretation (area_or_point) without it having any influence on the result, here "Area" - if not return_interpolator: - coords_dst = _coords(transform=dst_transform, area_or_point="Area", shape=raster_arr.shape) - # If we just want the interpolator, we don't need to coordinates of destination points - else: - coords_dst = None - - output = _interp_points( - array=raster_arr, - area_or_point="Area", - transform=src_transform, - points=coords_dst, - method=resampling, - return_interpolator=return_interpolator, - ) - - return output - - def _check_inputs_bin_before_fit( bin_before_fit: bool, fit_optimizer: Callable[..., tuple[NDArrayf, Any]], @@ -845,6 +785,34 @@ def to_matrix(self) -> NDArrayf: """Convert the transform to a 4x4 transformation matrix.""" return self._to_matrix_func() + def to_translations(self) -> tuple[float, float, float]: + """ + Convert the affine transformation matrix to only its X/Y/Z translations. + + :return: Easting, northing and vertical translations (in georeferenced unit). 
+        """
+
+        matrix = self.to_matrix()
+        # Translations are stored in the last column of the 4x4 affine matrix
+        shift_x = matrix[0, 3]
+        shift_y = matrix[1, 3]
+        shift_z = matrix[2, 3]
+
+        return shift_x, shift_y, shift_z
+
+    def to_rotations(self) -> tuple[float, float, float]:
+        """
+        Convert the affine transformation to its X/Y/Z Euler rotations, extrinsic convention.
+
+        :return: Extrinsic Euler rotations along easting, northing and vertical directions (degrees).
+        """
+
+        matrix = self.to_matrix()
+        # Only the upper-left 3x3 submatrix of the affine matrix describes the rotation
+        rots = pytransform3d.rotations.euler_from_matrix(
+            matrix[:3, :3], i=0, j=1, k=2, extrinsic=True, strict_check=True
+        )
+        rots = np.rad2deg(np.array(rots))
+        return rots[0], rots[1], rots[2]
+
     def centroid(self) -> tuple[float, float, float] | None:
         """Get the centroid of the coregistration, if defined."""
         meta_centroid = self._meta["outputs"]["affine"].get("centroid")
@@ -928,7 +896,7 @@ def from_matrix(cls, matrix: NDArrayf) -> AffineCoreg:
         return cls(matrix=valid_matrix)
 
     @classmethod
-    def from_translation(cls, x_off: float = 0.0, y_off: float = 0.0, z_off: float = 0.0) -> AffineCoreg:
+    def from_translations(cls, x_off: float = 0.0, y_off: float = 0.0, z_off: float = 0.0) -> AffineCoreg:
         """
         Instantiate a generic Coreg class from an X/Y/Z translation.
 
@@ -940,13 +908,39 @@ def from_translation(cls, x_off: float = 0.0, y_off: float = 0.0, z_off: float =
         :returns: An instantiated generic Coreg class.
         """
 
+        # Initialize a diagonal matrix
         matrix = np.diag(np.ones(4, dtype=float))
+        # Add the three translations (which are in the last column)
         matrix[0, 3] = x_off
         matrix[1, 3] = y_off
        matrix[2, 3] = z_off
 
         return cls.from_matrix(matrix)
 
+    @classmethod
+    def from_rotations(cls, x_rot: float = 0.0, y_rot: float = 0.0, z_rot: float = 0.0) -> AffineCoreg:
+        """
+        Instantiate a generic Coreg class from an X/Y/Z rotation.
+
+        :param x_rot: The rotation (degrees) to apply around the X (west-east) direction.
+        :param y_rot: The rotation (degrees) to apply around the Y (south-north) direction.
+        :param z_rot: The rotation (degrees) to apply around the Z (vertical) direction.
+
+        :raises ValueError: If the given rotation contained invalid values.
+
+        :returns: An instantiated generic Coreg class.
+        """
+
+        # Initialize a diagonal matrix
+        matrix = np.diag(np.ones(4, dtype=float))
+        # Convert rotations to radians
+        e = np.deg2rad(np.array([x_rot, y_rot, z_rot]))
+        # Derive the 3x3 rotation matrix, and insert it into the 4x4 affine matrix
+        rot_matrix = pytransform3d.rotations.matrix_from_euler(e, i=0, j=1, k=2, extrinsic=True)
+        matrix[0:3, 0:3] = rot_matrix
+
+        return cls.from_matrix(matrix)
+
     def _to_matrix_func(self) -> NDArrayf:
         # FOR DEVELOPERS: This function needs to be implemented if the `self._meta['matrix']` keyword is not None.
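A quick round-trip sketch of the classmethods added above (illustrative only; it mirrors the behavior exercised in `test_from_classmethods`):

```python
import numpy as np

from xdem.coreg import AffineCoreg

# Round-trip a translation: build from offsets, then read them back from the matrix
coreg_shift = AffineCoreg.from_translations(x_off=10.0, y_off=-5.0)
assert np.allclose(coreg_shift.to_translations(), (10.0, -5.0, 0.0))

# Round-trip a rotation (degrees, extrinsic convention)
coreg_rot = AffineCoreg.from_rotations(z_rot=0.2)
assert np.isclose(coreg_rot.to_rotations()[2], 0.2)
```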
diff --git a/xdem/coreg/base.py b/xdem/coreg/base.py index 55c5524a..21623475 100644 --- a/xdem/coreg/base.py +++ b/xdem/coreg/base.py @@ -39,7 +39,7 @@ raster, subdivide_array, ) -from geoutils.raster.georeferencing import _bounds, _res +from geoutils.raster.georeferencing import _bounds, _res, _coords from geoutils.raster.interpolate import _interp_points from geoutils.raster.raster import _cast_pixel_interpretation, _shift_transform from tqdm import tqdm @@ -1217,7 +1217,7 @@ def _apply_matrix_rst( invert: bool = False, centroid: tuple[float, float, float] | None = None, resampling: Literal["nearest", "linear", "cubic", "quintic"] = "linear", - verbose: bool = True, + verbose: bool = False, force_regrid_method: Literal["iterative", "griddata"] | None = None, ) -> tuple[NDArrayf, rio.transform.Affine]: """ @@ -1282,6 +1282,65 @@ def _apply_matrix_rst( return new_dem, transform +@overload +def _reproject_horizontal_shift_samecrs( + raster_arr: NDArrayf, + src_transform: rio.transform.Affine, + dst_transform: rio.transform.Affine = None, + *, + return_interpolator: Literal[False] = False, + resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", +) -> NDArrayf: + ... + + +@overload +def _reproject_horizontal_shift_samecrs( + raster_arr: NDArrayf, + src_transform: rio.transform.Affine, + dst_transform: rio.transform.Affine = None, + *, + return_interpolator: Literal[True], + resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", +) -> Callable[[tuple[NDArrayf, NDArrayf]], NDArrayf]: + ... + + +def _reproject_horizontal_shift_samecrs( + raster_arr: NDArrayf, + src_transform: rio.transform.Affine, + dst_transform: rio.transform.Affine = None, + return_interpolator: bool = False, + resampling: Literal["nearest", "linear", "cubic", "quintic", "slinear", "pchip", "splinef2d"] = "linear", +) -> NDArrayf | Callable[[tuple[NDArrayf, NDArrayf]], NDArrayf]: + """ + Reproject a raster only for a horizontal shift (transform update) in the same CRS. + + This function exists independently of Raster.reproject() because Rasterio has unexplained reprojection issues + that can create non-negligible sub-pixel shifts that should be crucially avoided for coregistration. + See https://github.com/rasterio/rasterio/issues/2052#issuecomment-2078732477. + + Here we use SciPy interpolation instead, modified for nodata propagation in geoutils.interp_points(). 
+ """ + + # We are reprojecting the raster array relative to itself without changing its pixel interpretation, so we can + # force any pixel interpretation (area_or_point) without it having any influence on the result, here "Area" + if not return_interpolator: + coords_dst = _coords(transform=dst_transform, area_or_point="Area", shape=raster_arr.shape) + # If we just want the interpolator, we don't need to coordinates of destination points + else: + coords_dst = None + + output = _interp_points( + array=raster_arr, + area_or_point="Area", + transform=src_transform, + points=coords_dst, + method=resampling, + return_interpolator=return_interpolator, + ) + + return output @overload def apply_matrix( @@ -1289,6 +1348,7 @@ def apply_matrix( matrix: NDArrayf, invert: bool = False, centroid: tuple[float, float, float] | None = None, + resample: bool = True, resampling: Literal["nearest", "linear", "cubic", "quintic"] = "linear", transform: rio.transform.Affine = None, z_name: str = "z", @@ -1303,6 +1363,7 @@ def apply_matrix( matrix: NDArrayf, invert: bool = False, centroid: tuple[float, float, float] | None = None, + resample: bool = True, resampling: Literal["nearest", "linear", "cubic", "quintic"] = "linear", transform: rio.transform.Affine = None, z_name: str = "z", @@ -1316,6 +1377,7 @@ def apply_matrix( matrix: NDArrayf, invert: bool = False, centroid: tuple[float, float, float] | None = None, + resample: bool = True, resampling: Literal["nearest", "linear", "cubic", "quintic"] = "linear", transform: rio.transform.Affine = None, z_name: str = "z", @@ -1346,6 +1408,8 @@ def apply_matrix( :param invert: Whether to invert the transformation matrix. :param centroid: The X/Y/Z transformation centroid. Irrelevant for pure translations. Defaults to the midpoint (Z=0). + :param resample: (For translations) If set to True, will resample output on the translated grid to match the input + transform. Otherwise, only the transform will be updated and no resampling is done. :param resampling: Point interpolation method, one of 'nearest', 'linear', 'cubic', or 'quintic'. For more information, see scipy.ndimage.map_coordinates and scipy.interpolate.interpn. Default is linear. :param transform: Geotransform of the DEM, only for DEM passed as 2D array. @@ -1355,15 +1419,18 @@ def apply_matrix( :return: Affine transformed elevation point cloud or DEM. 
""" + # Apply matrix to elevation point cloud if isinstance(elev, gpd.GeoDataFrame): return _apply_matrix_pts(epc=elev, matrix=matrix, invert=invert, centroid=centroid, z_name=z_name) + # Or apply matrix to raster (often requires re-gridding) else: + + # First, we apply the affine matrix for the array/transform if isinstance(elev, gu.Raster): transform = elev.transform dem = elev.data.filled(np.nan) else: dem = elev - applied_dem, out_transform = _apply_matrix_rst( dem=dem, transform=transform, @@ -1373,6 +1440,14 @@ def apply_matrix( resampling=resampling, **kwargs, ) + + # Then, if resample is True, we reproject the DEM from its out_transform onto the transform + if resample: + applied_dem = _reproject_horizontal_shift_samecrs(applied_dem, src_transform=out_transform, + dst_transform=transform, resampling=resampling) + out_transform = transform + + # We return a raster if input was a raster if isinstance(elev, gu.Raster): applied_dem = gu.Raster.from_array(applied_dem, out_transform, elev.crs, elev.nodata) return applied_dem diff --git a/xdem/terrain.py b/xdem/terrain.py index 50c319a8..fbeb90f6 100644 --- a/xdem/terrain.py +++ b/xdem/terrain.py @@ -540,7 +540,7 @@ def get_windowed_indexes( http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf, for topography and from Wilson et al. (2007), http://dx.doi.org/10.1080/01490410701295962, for bathymetry. - Topographic Position Index from Weiss (2001), http://www.jennessent.com/downloads/TPI-poster-TNC_18x22.pdf. - - Roughness from Dartnell (2000), http://dx.doi.org/10.14358/PERS.70.9.1081. + - Roughness from Dartnell (2000), thesis referenced in Wilson et al. (2007) above. - Fractal roughness from Taud et Parrot (2005), https://doi.org/10.4000/geomorphologie.622. Nearly all are also referenced in Wilson et al. (2007). @@ -727,7 +727,7 @@ def get_terrain_attribute( - Terrain Ruggedness Index (topography) from Riley et al. (1999), http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf. - Terrain Ruggedness Index (bathymetry) from Wilson et al. (2007), http://dx.doi.org/10.1080/01490410701295962. - - Roughness from Dartnell (2000), http://dx.doi.org/10.14358/PERS.70.9.1081. + - Roughness from Dartnell (2000), thesis referenced in Wilson et al. (2007) above. - Rugosity from Jenness (2004), https://doi.org/10.2193/0091-7648(2004)032[0829:CLSAFD]2.0.CO;2. - Fractal roughness from Taud et Parrot (2005), https://doi.org/10.4000/geomorphologie.622. @@ -1600,7 +1600,7 @@ def roughness(dem: NDArrayf | MArrayf | RasterType, window_size: int = 3) -> NDA Calculates the roughness, the maximum difference between neighbouring pixels, for any window size. Output is in the unit of the DEM (typically meters). - Based on: Dartnell (2000), http://dx.doi.org/10.14358/PERS.70.9.1081. + Based on: Dartnell (2000), https://environment.sfsu.edu/node/11292. :param dem: The DEM to calculate the roughness from. :param window_size: The size of the window for deriving the metric.