CLN: Standardize imports to only use 'as' when naming conflicts arise (
snowman2 authored Jul 26, 2023
1 parent 15b7fcc commit 11092a8
Showing 10 changed files with 141 additions and 137 deletions.
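The convention applied throughout is to import modules under their full name and to reserve "as" aliases for genuine naming conflicts. A minimal sketch of the style change, mirroring the numpy import swap in the files below (illustrative only):

    # before: aliased import
    import numpy as np
    dtype = np.dtype("int16")

    # after: plain import, fully qualified references
    import numpy
    dtype = numpy.dtype("int16")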
66 changes: 33 additions & 33 deletions rioxarray/_io.py
@@ -17,7 +17,7 @@
from collections.abc import Hashable, Iterable
from typing import Any, Optional, Union

import numpy as np
import numpy
import rasterio
from numpy.typing import NDArray
from packaging import version
@@ -171,11 +171,11 @@ def _get_unsigned_dtype(unsigned, dtype):
"""
Based on: https://github.com/pydata/xarray/blob/abe1e613a96b000ae603c53d135828df532b952e/xarray/coding/variables.py#L306-L334
"""
dtype = np.dtype(dtype)
dtype = numpy.dtype(dtype)
if unsigned is True and dtype.kind == "i":
return np.dtype(f"u{dtype.itemsize}")
return numpy.dtype(f"u{dtype.itemsize}")
if unsigned is False and dtype.kind == "u":
return np.dtype(f"i{dtype.itemsize}")
return numpy.dtype(f"i{dtype.itemsize}")
return None


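For reference, the behaviour of the _get_unsigned_dtype helper can be read directly from the hunk above; a small sketch of the expected results (deduced from the code shown, not from running the library, and assuming the private helper stays importable from rioxarray._io):

    import numpy
    from rioxarray._io import _get_unsigned_dtype  # private helper defined in this file

    # a signed dtype flips to its unsigned counterpart when unsigned=True
    assert _get_unsigned_dtype(True, "int16") == numpy.dtype("uint16")
    # an unsigned dtype flips back to signed when unsigned=False
    assert _get_unsigned_dtype(False, "uint8") == numpy.dtype("int8")
    # any other combination falls through and returns None
    assert _get_unsigned_dtype(True, "float32") is None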
@@ -314,7 +314,7 @@ def __init__(
self._unsigned_dtype = None
self._fill_value = riods.nodata
dtypes = riods.dtypes
if not np.all(np.asarray(dtypes) == dtypes[0]):
if not numpy.all(numpy.asarray(dtypes) == dtypes[0]):
raise ValueError("All bands should have the same dtype")

dtype = _rasterio_to_numpy_dtype(dtypes)
@@ -385,9 +385,9 @@ def _get_indexer(self, key):
# bands (axis=0) cannot be windowed but they can be listed
if isinstance(band_key, slice):
start, stop, step = band_key.indices(self.shape[0])
band_key = np.arange(start, stop, step)
band_key = numpy.arange(start, stop, step)
# be sure we give out a list
band_key = (np.asarray(band_key) + 1).tolist()
band_key = (numpy.asarray(band_key) + 1).tolist()
if isinstance(band_key, list): # if band_key is not a scalar
np_inds.append(slice(None))

@@ -406,13 +406,13 @@ def _get_indexer(self, key):
start = ikey
stop = ikey + 1
else:
start, stop = np.min(ikey), np.max(ikey) + 1
start, stop = numpy.min(ikey), numpy.max(ikey) + 1
np_inds.append(ikey - start)
window.append((start, stop))

if isinstance(key[1], np.ndarray) and isinstance(key[2], np.ndarray):
if isinstance(key[1], numpy.ndarray) and isinstance(key[2], numpy.ndarray):
# do outer-style indexing
np_inds[-2:] = np.ix_(*np_inds[-2:])
np_inds[-2:] = numpy.ix_(*np_inds[-2:])

return band_key, tuple(window), tuple(squeeze_axis), tuple(np_inds)

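The numpy.ix_ call above performs outer-style indexing over the row and column selectors; a tiny standalone refresher (plain numpy behaviour, unrelated to the rasterio I/O code itself):

    import numpy

    arr = numpy.arange(16).reshape(4, 4)
    rows = numpy.array([0, 2])
    cols = numpy.array([1, 3])
    # every selected row is crossed with every selected column
    print(arr[numpy.ix_(rows, cols)])
    # [[ 1  3]
    #  [ 9 11]]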
@@ -421,7 +421,7 @@ def _getitem(self, key):
if not band_key or any(start == stop for (start, stop) in window):
# no need to do IO
shape = (len(band_key),) + tuple(stop - start for (start, stop) in window)
out = np.zeros(shape, dtype=self.dtype)
out = numpy.zeros(shape, dtype=self.dtype)
else:
with self.lock:
riods = _ensure_warped_vrt(
@@ -431,22 +431,22 @@ def _getitem(self, key):
if self._unsigned_dtype is not None:
out = out.astype(self._unsigned_dtype)
if self.masked:
out = np.ma.filled(out.astype(self.dtype), self.fill_value)
out = numpy.ma.filled(out.astype(self.dtype), self.fill_value)
if self.mask_and_scale:
if not isinstance(band_key, Iterable):
out = (
out * riods.scales[band_key - 1]
+ riods.offsets[band_key - 1]
)
else:
for iii, band_iii in enumerate(np.atleast_1d(band_key) - 1):
for iii, band_iii in enumerate(numpy.atleast_1d(band_key) - 1):
out[iii] = (
out[iii] * riods.scales[band_iii]
+ riods.offsets[band_iii]
)

if squeeze_axis:
out = np.squeeze(out, axis=squeeze_axis)
out = numpy.squeeze(out, axis=squeeze_axis)
return out[np_inds]

def __getitem__(self, key):
@@ -475,7 +475,7 @@ def _parse_envi(meta):
"""

def parsevec(value):
return np.fromstring(value.strip("{}"), dtype="float", sep=",")
return numpy.fromstring(value.strip("{}"), dtype="float", sep=",")

def default(value):
return value.strip("{}")
@@ -487,11 +487,11 @@ def default(value):

def _rasterio_to_numpy_dtype(dtypes):
"""Numpy dtype from first entry of rasterio dataset.dtypes"""
# rasterio has some special dtype names (complex_int16 -> np.complex64)
# rasterio has some special dtype names (complex_int16 -> numpy.complex64)
if dtypes[0] == "complex_int16":
dtype = np.dtype("complex64")
dtype = numpy.dtype("complex64")
else:
dtype = np.dtype(dtypes[0])
dtype = numpy.dtype(dtypes[0])

return dtype

@@ -515,7 +515,7 @@ def _parse_tag(key: str, value: Any) -> tuple[str, Any]:
key = key.split("NC_GLOBAL#")[-1]
if value.startswith("{") and value.endswith("}"):
try:
new_val = np.fromstring(value.strip("{}"), dtype="float", sep=",")
new_val = numpy.fromstring(value.strip("{}"), dtype="float", sep=",")
# pylint: disable=len-as-condition
value = new_val if len(new_val) else _to_numeric(value)
except ValueError:
@@ -535,17 +535,17 @@ def _parse_tags(tags: dict) -> dict:

NETCDF_DTYPE_MAP = {
0: object, # NC_NAT
1: np.byte, # NC_BYTE
2: np.char, # NC_CHAR
3: np.short, # NC_SHORT
4: np.int_, # NC_INT, NC_LONG
1: numpy.byte, # NC_BYTE
2: numpy.char, # NC_CHAR
3: numpy.short, # NC_SHORT
4: numpy.int_, # NC_INT, NC_LONG
5: float, # NC_FLOAT
6: np.double, # NC_DOUBLE
7: np.ubyte, # NC_UBYTE
8: np.ushort, # NC_USHORT
9: np.uint, # NC_UINT
10: np.int64, # NC_INT64
11: np.uint64, # NC_UINT64
6: numpy.double, # NC_DOUBLE
7: numpy.ubyte, # NC_UBYTE
8: numpy.ushort, # NC_USHORT
9: numpy.uint, # NC_UINT
10: numpy.int64, # NC_INT64
11: numpy.uint64, # NC_UINT64
12: object, # NC_STRING
}
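NETCDF_DTYPE_MAP pairs the numeric NetCDF type codes with numpy types; a short illustration of a lookup (based only on the mapping shown above, and assuming the module-level map stays importable from rioxarray._io):

    import numpy
    from rioxarray._io import NETCDF_DTYPE_MAP  # module-level map defined in this file

    # NC_INT64 (code 10) resolves to a 64-bit signed integer dtype
    assert numpy.dtype(NETCDF_DTYPE_MAP[10]) == numpy.dtype("int64")
    # NC_FLOAT (code 5) maps to Python float, i.e. float64 in numpy terms
    assert numpy.dtype(NETCDF_DTYPE_MAP[5]) == numpy.dtype("float64")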

@@ -581,7 +581,7 @@ def _parse_netcdf_attr_array(attr: Union[NDArray, str], dtype=None) -> NDArray:
value = [attr]
else:
value = attr
return np.array(value, dtype=dtype)
return numpy.array(value, dtype=dtype)


def _load_netcdf_1d_coords(tags: dict) -> dict:
@@ -753,8 +753,8 @@ def _parse_driver_tags(
for key, value in meta.items():
# Add values as coordinates if they match the band count,
# as attributes otherwise
if isinstance(value, (list, np.ndarray)) and len(value) == riods.count:
coords[key] = ("band", np.asarray(value))
if isinstance(value, (list, numpy.ndarray)) and len(value) == riods.count:
coords[key] = ("band", numpy.asarray(value))
else:
attrs[key] = value

@@ -1193,7 +1193,7 @@ def open_rasterio(
break
else:
coord_name = "band"
coords[coord_name] = np.asarray(riods.indexes)
coords[coord_name] = numpy.asarray(riods.indexes)

has_gcps = riods.gcps[0]
if has_gcps:
58 changes: 30 additions & 28 deletions rioxarray/raster_array.py
@@ -15,7 +15,7 @@
from pathlib import Path
from typing import Any, Literal, Optional, Union

import numpy as np
import numpy
import rasterio
import rasterio.mask
import rasterio.warp
@@ -170,9 +170,11 @@ def _clip_from_disk(
invert=invert,
crop=drop,
)
if xds.rio.encoded_nodata is not None and not np.isnan(xds.rio.encoded_nodata):
out_image = out_image.astype(np.float64)
out_image[out_image == xds.rio.encoded_nodata] = np.nan
if xds.rio.encoded_nodata is not None and not numpy.isnan(
xds.rio.encoded_nodata
):
out_image = out_image.astype(numpy.float64)
out_image[out_image == xds.rio.encoded_nodata] = numpy.nan

height, width = out_image.shape[-2:]
cropped_ds = xarray.DataArray(
@@ -216,10 +218,10 @@ def _clip_xarray(
)
cropped_ds = cropped_ds.rio.isel_window(
rasterio.windows.get_data_window(
np.ma.masked_array(clip_mask_arr, ~clip_mask_arr)
numpy.ma.masked_array(clip_mask_arr, ~clip_mask_arr)
)
)
if xds.rio.nodata is not None and not np.isnan(xds.rio.nodata):
if xds.rio.nodata is not None and not numpy.isnan(xds.rio.nodata):
cropped_ds = cropped_ds.fillna(xds.rio.nodata)

return cropped_ds.astype(xds.dtype)
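In _clip_xarray above, the boolean clip mask is wrapped in a masked array so that rasterio's get_data_window can shrink the result to the bounding window of unmasked cells; a rough sketch of that idea with a toy mask (assumes rasterio is installed; array values are illustrative, not from the library):

    import numpy
    import rasterio.windows

    clip_mask_arr = numpy.array(
        [[False, False, False],
         [False, True, True],
         [False, True, True]]
    )
    # cells where the mask is False are flagged invalid, so the data window
    # collapses to the 2x2 block of True cells, offset by one row and column
    window = rasterio.windows.get_data_window(
        numpy.ma.masked_array(clip_mask_arr, ~clip_mask_arr)
    )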
@@ -502,15 +504,15 @@ def _get_dst_nodata(self, nodata: Optional[float]) -> Optional[float]:
dst_nodata = default_nodata if nodata is None else nodata
return dst_nodata

def _create_dst_data(self, dst_height: int, dst_width: int) -> np.ndarray:
def _create_dst_data(self, dst_height: int, dst_width: int) -> numpy.ndarray:
extra_dim = self._check_dimensions()
if extra_dim:
dst_data = np.zeros(
dst_data = numpy.zeros(
(self._obj[extra_dim].size, dst_height, dst_width),
dtype=self._obj.dtype.type,
)
else:
dst_data = np.zeros((dst_height, dst_width), dtype=self._obj.dtype.type)
dst_data = numpy.zeros((dst_height, dst_width), dtype=self._obj.dtype.type)
return dst_data

def reproject_match(
@@ -601,7 +603,7 @@ def pad_xy(
Maximum bound for y coordinate.
constant_values: scalar, tuple or mapping of hashable to tuple
The value used for padding. If None, nodata will be used if it is
set, and np.nan otherwise.
set, and numpy.nan otherwise.
Returns
@@ -614,33 +616,33 @@ def pad_xy(
resolution_x, resolution_y = self.resolution()
y_before = y_after = 0
x_before = x_after = 0
y_coord: Union[xarray.DataArray, np.ndarray] = self._obj[self.y_dim]
x_coord: Union[xarray.DataArray, np.ndarray] = self._obj[self.x_dim]
y_coord: Union[xarray.DataArray, numpy.ndarray] = self._obj[self.y_dim]
x_coord: Union[xarray.DataArray, numpy.ndarray] = self._obj[self.x_dim]

if top - resolution_y < maxy:
new_y_coord: np.ndarray = np.arange(bottom, maxy, -resolution_y)[::-1]
new_y_coord: numpy.ndarray = numpy.arange(bottom, maxy, -resolution_y)[::-1]
y_before = len(new_y_coord) - len(y_coord)
y_coord = new_y_coord
top = y_coord[0]
if bottom + resolution_y > miny:
new_y_coord = np.arange(top, miny, resolution_y)
new_y_coord = numpy.arange(top, miny, resolution_y)
y_after = len(new_y_coord) - len(y_coord)
y_coord = new_y_coord
bottom = y_coord[-1]

if left - resolution_x > minx:
new_x_coord: np.ndarray = np.arange(right, minx, -resolution_x)[::-1]
new_x_coord: numpy.ndarray = numpy.arange(right, minx, -resolution_x)[::-1]
x_before = len(new_x_coord) - len(x_coord)
x_coord = new_x_coord
left = x_coord[0]
if right + resolution_x < maxx:
new_x_coord = np.arange(left, maxx, resolution_x)
new_x_coord = numpy.arange(left, maxx, resolution_x)
x_after = len(new_x_coord) - len(x_coord)
x_coord = new_x_coord
right = x_coord[-1]

if constant_values is None:
constant_values = np.nan if self.nodata is None else self.nodata
constant_values = numpy.nan if self.nodata is None else self.nodata

superset = self._obj.pad(
pad_width={
@@ -680,7 +682,7 @@ def pad_box(
Maximum bound for y coordinate.
constant_values: scalar, tuple or mapping of hashable to tuple
The value used for padding. If None, nodata will be used if it is
set, and np.nan otherwise.
set, and numpy.nan otherwise.
Returns
@@ -790,10 +792,10 @@ def clip_box(
window_error = None
try:
window = rasterio.windows.from_bounds(
left=np.array(left).item(),
bottom=np.array(bottom).item(),
right=np.array(right).item(),
top=np.array(top).item(),
left=numpy.array(left).item(),
bottom=numpy.array(bottom).item(),
right=numpy.array(right).item(),
top=numpy.array(top).item(),
transform=self.transform(recalc=True),
)
cl_array: xarray.DataArray = self.isel_window(window) # type: ignore
@@ -929,7 +931,7 @@ def clip(

def _interpolate_na(
self, src_data: Any, method: Literal["linear", "nearest", "cubic"] = "nearest"
) -> np.ndarray:
) -> numpy.ndarray:
"""
This method uses scipy.interpolate.griddata to interpolate missing data.
@@ -956,18 +958,18 @@ def _interpolate_na(

src_data_flat = src_data.flatten()
try:
data_isnan = np.isnan(self.nodata) # type: ignore
data_isnan = numpy.isnan(self.nodata) # type: ignore
except TypeError:
data_isnan = False
if not data_isnan:
data_bool = src_data_flat != self.nodata
else:
data_bool = ~np.isnan(src_data_flat)
data_bool = ~numpy.isnan(src_data_flat)

if not data_bool.any():
return src_data

x_coords, y_coords = np.meshgrid(
x_coords, y_coords = numpy.meshgrid(
self._obj.coords[self.x_dim].values, self._obj.coords[self.y_dim].values
)

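The masking logic at the top of _interpolate_na distinguishes a NaN nodata value from a numeric one before building the valid-data mask; a minimal standalone sketch of that step (array and nodata values are illustrative, not taken from the library):

    import numpy

    src_data_flat = numpy.array([1.0, numpy.nan, 3.0, 4.0])
    nodata = numpy.nan  # assumed nodata value for this example
    try:
        data_isnan = numpy.isnan(nodata)
    except TypeError:
        data_isnan = False
    if not data_isnan:
        data_bool = src_data_flat != nodata
    else:
        data_bool = ~numpy.isnan(src_data_flat)
    # data_bool -> [ True False  True  True]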
@@ -1010,7 +1012,7 @@ def interpolate_na(
interp_data.append(
self._interpolate_na(sub_xds.load().data, method=method)
)
interp_data = np.array(interp_data) # type: ignore
interp_data = numpy.array(interp_data) # type: ignore
else:
interp_data = self._interpolate_na(self._obj.load().data, method=method) # type: ignore

@@ -1032,7 +1034,7 @@ def to_raster(
self,
raster_path: Union[str, os.PathLike],
driver: Optional[str] = None,
dtype: Optional[Union[str, np.dtype]] = None,
dtype: Optional[Union[str, numpy.dtype]] = None,
tags: Optional[dict[str, str]] = None,
windowed: bool = False,
recalc_transform: bool = True,