"""
This module is an extension for xarray to provide rasterio capabilities
to xarray datasets/dataarrays.
"""
# pylint: disable=too-many-lines
import json
import math
import warnings
from collections.abc import Hashable, Iterable
from typing import Any, Literal, Optional, Union
import numpy
import pyproj
import rasterio.warp
import rasterio.windows
import xarray
from affine import Affine
from pyproj.aoi import AreaOfInterest
from pyproj.database import query_utm_crs_info
from rasterio.control import GroundControlPoint
from rasterio.crs import CRS
from rasterio.rpc import RPC
from rioxarray._options import EXPORT_GRID_MAPPING, get_option
from rioxarray._spatial_utils import ( # noqa: F401, pylint: disable=unused-import
DEFAULT_GRID_MAP,
_affine_has_rotation,
_convert_gcps_to_geojson,
_get_data_var_message,
_get_spatial_dims,
_has_spatial_dims,
_order_bounds,
_resolution,
affine_to_coords,
)
from rioxarray.crs import crs_from_user_input
from rioxarray.exceptions import (
DimensionError,
DimensionMissingCoordinateError,
InvalidDimensionOrder,
MissingCRS,
MissingSpatialDimensionError,
NoDataInBounds,
OneDimensionalRaster,
RioXarrayError,
TooManyDimensions,
)
class XRasterBase:
"""This is the base class for the GIS extensions for xarray"""
# pylint: disable=too-many-instance-attributes
def __init__(self, xarray_obj: Union[xarray.DataArray, xarray.Dataset]):
self._obj: Union[xarray.DataArray, xarray.Dataset] = xarray_obj
self._x_dim: Optional[Hashable] = None
self._y_dim: Optional[Hashable] = None
# Determine the spatial dimensions of the `xarray.DataArray`
if "x" in self._obj.dims and "y" in self._obj.dims:
self._x_dim = "x"
self._y_dim = "y"
elif "longitude" in self._obj.dims and "latitude" in self._obj.dims:
self._x_dim = "longitude"
self._y_dim = "latitude"
else:
# look for coordinates with CF attributes
for coord in self._obj.coords:
# make sure to only look in 1D coordinates
# that have the same dimension name as the coordinate
if self._obj.coords[coord].dims != (coord,):
continue
if (self._obj.coords[coord].attrs.get("axis", "").upper() == "X") or (
self._obj.coords[coord].attrs.get("standard_name", "").lower()
in ("longitude", "projection_x_coordinate")
):
self._x_dim = coord
elif (self._obj.coords[coord].attrs.get("axis", "").upper() == "Y") or (
self._obj.coords[coord].attrs.get("standard_name", "").lower()
in ("latitude", "projection_y_coordinate")
):
self._y_dim = coord
# properties
self._count: Optional[int] = None
self._height: Optional[int] = None
self._width: Optional[int] = None
self._crs: Union[rasterio.crs.CRS, None, Literal[False]] = None
self._gcps: Optional[list[GroundControlPoint]] = None
self._rpcs: Optional[RPC] = None
@property
def crs(self) -> Optional[rasterio.crs.CRS]:
""":obj:`rasterio.crs.CRS`:
Retrieve projection from :obj:`xarray.Dataset` | :obj:`xarray.DataArray`
"""
if self._crs is not None:
return None if self._crs is False else self._crs
# look in WKT attributes to avoid constructing a
# pyproj CRS when possible, for performance
for crs_attr in ("spatial_ref", "crs_wkt"):
try:
self._set_crs(
self._obj.coords[self.grid_mapping].attrs[crs_attr],
inplace=True,
)
return self._crs
except KeyError:
pass
# look in grid_mapping
try:
self._set_crs(
pyproj.CRS.from_cf(self._obj.coords[self.grid_mapping].attrs),
inplace=True,
)
except (KeyError, pyproj.exceptions.CRSError):
try:
# look in attrs for 'crs'
self._set_crs(self._obj.attrs["crs"], inplace=True)
except KeyError:
self._crs = False
return None
return self._crs
def _get_obj(self, inplace: bool) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Get the object to modify.
Parameters
----------
inplace: bool
If True, returns self.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`
"""
if inplace:
return self._obj
obj_copy = self._obj.copy(deep=True)
# preserve attribute information
obj_copy.rio._x_dim = self._x_dim
obj_copy.rio._y_dim = self._y_dim
obj_copy.rio._width = self._width
obj_copy.rio._height = self._height
obj_copy.rio._crs = self._crs
obj_copy.rio._gcps = self._gcps
obj_copy.rio._rpcs = self._rpcs
return obj_copy
def set_crs(
self, input_crs: Any, inplace: bool = True
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Set the CRS value for the Dataset/DataArray without modifying
the dataset/data array.
.. deprecated:: 0.15.8
It is recommended to use `rio.write_crs()` instead. This
method will likely be removed in a future release.
Parameters
----------
input_crs: object
Anything accepted by `rasterio.crs.CRS.from_user_input`.
inplace: bool, optional
If True, it will write to the existing dataset. Default is True.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Dataset with crs attribute.
"""
warnings.warn(
"It is recommended to use 'rio.write_crs()' instead. 'rio.set_crs()' will likely "
"be removed in a future release.",
FutureWarning,
stacklevel=2,
)
return self._set_crs(input_crs, inplace=inplace)
def _set_crs(
self, input_crs: Any, inplace: bool = True
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Set the CRS value for the Dataset/DataArray without modifying
the dataset/data array.
Parameters
----------
input_crs: object
Anything accepted by `rasterio.crs.CRS.from_user_input`.
inplace: bool, optional
If True, it will write to the existing dataset. Default is True.
Returns
-------
xarray.Dataset | xarray.DataArray
Dataset with crs attribute.
"""
crs = crs_from_user_input(input_crs)
obj = self._get_obj(inplace=inplace)
obj.rio._crs = crs
return obj
@property
def grid_mapping(self) -> str:
"""
str: The CF grid_mapping attribute. 'spatial_ref' is the default.
"""
grid_mapping = self._obj.encoding.get(
"grid_mapping", self._obj.attrs.get("grid_mapping")
)
if grid_mapping is not None:
return grid_mapping
grid_mapping = DEFAULT_GRID_MAP
# search the dataset for the grid mapping name
if hasattr(self._obj, "data_vars"):
grid_mappings = set()
for var in self._obj.data_vars:
if not _has_spatial_dims(self._obj, var=var):
continue
var_grid_mapping = self._obj[var].encoding.get(
"grid_mapping", self._obj[var].attrs.get("grid_mapping")
)
if var_grid_mapping is not None:
grid_mapping = var_grid_mapping
grid_mappings.add(grid_mapping)
if len(grid_mappings) > 1:
raise RioXarrayError("Multiple grid mappings exist.")
return grid_mapping
def write_grid_mapping(
self, grid_mapping_name: str = DEFAULT_GRID_MAP, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Write the CF grid_mapping attribute to the encoding.
Parameters
----------
grid_mapping_name: str, optional
Name of the grid_mapping coordinate.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with CF compliant CRS information.
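Examples
--------
Illustrative usage only; ``raster`` is assumed to be a georeferenced
:obj:`xarray.DataArray` or :obj:`xarray.Dataset`:
>>> raster = raster.rio.write_grid_mapping(grid_mapping_name="spatial_ref")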
"""
data_obj = self._get_obj(inplace=inplace)
if hasattr(data_obj, "data_vars"):
for var in data_obj.data_vars:
try:
x_dim, y_dim = _get_spatial_dims(data_obj, var=var)
except MissingSpatialDimensionError:
continue
# remove grid_mapping from attributes if it exists
# and update the grid_mapping in encoding
new_attrs = dict(data_obj[var].attrs)
new_attrs.pop("grid_mapping", None)
data_obj[var].rio.update_encoding(
{"grid_mapping": grid_mapping_name}, inplace=True
).rio.set_attrs(new_attrs, inplace=True).rio.set_spatial_dims(
x_dim=x_dim, y_dim=y_dim, inplace=True
)
# remove grid_mapping from attributes if it exists
# and update the grid_mapping in encoding
new_attrs = dict(data_obj.attrs)
new_attrs.pop("grid_mapping", None)
return data_obj.rio.update_encoding(
{"grid_mapping": grid_mapping_name}, inplace=True
).rio.set_attrs(new_attrs, inplace=True)
def write_crs(
self,
input_crs: Optional[Any] = None,
grid_mapping_name: Optional[str] = None,
inplace: bool = False,
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Write the CRS to the dataset in a CF compliant manner.
.. warning:: The grid_mapping attribute is written to the encoding.
Parameters
----------
input_crs: Any
Anything accepted by `rasterio.crs.CRS.from_user_input`.
grid_mapping_name: str, optional
Name of the grid_mapping coordinate to store the CRS information in.
Default is the grid_mapping name of the dataset.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with CF compliant CRS information.
Examples
--------
Write the CRS of the current `xarray` object:
>>> raster.rio.write_crs("epsg:4326", inplace=True)
Write the CRS on a copy:
>>> raster = raster.rio.write_crs("epsg:4326")
"""
if input_crs is not None:
data_obj = self._set_crs(input_crs, inplace=inplace)
else:
data_obj = self._get_obj(inplace=inplace)
# get original transform
transform = self._cached_transform()
# remove the old grid mapping coordinate if it exists
grid_mapping_name = (
self.grid_mapping if grid_mapping_name is None else grid_mapping_name
)
try:
del data_obj.coords[grid_mapping_name]
except KeyError:
pass
if data_obj.rio.crs is None:
raise MissingCRS(
"CRS not found. Please set the CRS with 'rio.write_crs()'."
)
# add grid mapping coordinate
data_obj.coords[grid_mapping_name] = xarray.Variable((), 0)
grid_map_attrs = {}
if get_option(EXPORT_GRID_MAPPING):
try:
grid_map_attrs = pyproj.CRS.from_user_input(data_obj.rio.crs).to_cf()
except KeyError:
pass
# spatial_ref is for compatibility with GDAL
crs_wkt = data_obj.rio.crs.to_wkt()
grid_map_attrs["spatial_ref"] = crs_wkt
grid_map_attrs["crs_wkt"] = crs_wkt
if transform is not None:
grid_map_attrs["GeoTransform"] = " ".join(
[str(item) for item in transform.to_gdal()]
)
data_obj.coords[grid_mapping_name].rio.set_attrs(grid_map_attrs, inplace=True)
# remove old crs if exists
data_obj.attrs.pop("crs", None)
return data_obj.rio.write_grid_mapping(
grid_mapping_name=grid_mapping_name, inplace=True
)
def estimate_utm_crs(self, datum_name: str = "WGS 84") -> rasterio.crs.CRS:
"""Returns the estimated UTM CRS based on the bounds of the dataset.
.. versionadded:: 0.2
.. note:: Requires pyproj 3+
Parameters
----------
datum_name : str, optional
The name of the datum to use in the query. Default is WGS 84.
Returns
-------
rasterio.crs.CRS
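Examples
--------
Illustrative usage; ``xds`` is an assumed dataset with a CRS already set:
>>> utm_crs = xds.rio.estimate_utm_crs()
>>> utm_xds = xds.rio.reproject(utm_crs)  # reproject to the estimated UTM CRS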
"""
if self.crs is None:
raise RuntimeError("crs must be set to estimate UTM CRS.")
# ensure using geographic coordinates
if self.crs.is_geographic: # pylint: disable=no-member
minx, miny, maxx, maxy = self.bounds(recalc=True)
else:
minx, miny, maxx, maxy = self.transform_bounds("EPSG:4326", recalc=True)
x_center = numpy.mean([minx, maxx]).item()
y_center = numpy.mean([miny, maxy]).item()
utm_crs_list = query_utm_crs_info(
datum_name=datum_name,
area_of_interest=AreaOfInterest(
west_lon_degree=x_center,
south_lat_degree=y_center,
east_lon_degree=x_center,
north_lat_degree=y_center,
),
)
try:
return CRS.from_epsg(utm_crs_list[0].code)
except IndexError:
raise RuntimeError("Unable to determine UTM CRS") from None
def _cached_transform(self) -> Optional[Affine]:
"""
Get the transform from:
1. The GeoTransform metadata property in the grid mapping
2. The transform attribute.
"""
try:
# look in grid_mapping
transform = numpy.fromstring(
self._obj.coords[self.grid_mapping].attrs["GeoTransform"], sep=" "
)
# Calling .tolist() to ensure the arguments are Python floats and JSON serializable
return Affine.from_gdal(*transform.tolist())
except KeyError:
try:
return Affine(*self._obj.attrs["transform"][:6])
except KeyError:
pass
return None
def write_coordinate_system(
self, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Write the coordinate system CF metadata.
.. versionadded:: 0.0.30
Parameters
----------
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
The dataset with the CF coordinate system attributes added.
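Examples
--------
Illustrative usage; ``raster`` is an assumed dataset with a CRS already written:
>>> raster = raster.rio.write_coordinate_system()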
"""
data_obj = self._get_obj(inplace=inplace)
# add metadata to x,y coordinates
is_projected = data_obj.rio.crs and data_obj.rio.crs.is_projected
is_geographic = data_obj.rio.crs and data_obj.rio.crs.is_geographic
x_coord_attrs = dict(data_obj.coords[self.x_dim].attrs)
x_coord_attrs["axis"] = "X"
y_coord_attrs = dict(data_obj.coords[self.y_dim].attrs)
y_coord_attrs["axis"] = "Y"
if is_projected:
units = None
if hasattr(data_obj.rio.crs, "linear_units_factor"):
unit_factor = data_obj.rio.crs.linear_units_factor[-1]
if unit_factor != 1:
units = f"{unit_factor} metre"
else:
units = "metre"
# X metadata
x_coord_attrs["long_name"] = "x coordinate of projection"
x_coord_attrs["standard_name"] = "projection_x_coordinate"
if units:
x_coord_attrs["units"] = units
# Y metadata
y_coord_attrs["long_name"] = "y coordinate of projection"
y_coord_attrs["standard_name"] = "projection_y_coordinate"
if units:
y_coord_attrs["units"] = units
elif is_geographic:
# X metadata
x_coord_attrs["long_name"] = "longitude"
x_coord_attrs["standard_name"] = "longitude"
x_coord_attrs["units"] = "degrees_east"
# Y metadata
y_coord_attrs["long_name"] = "latitude"
y_coord_attrs["standard_name"] = "latitude"
y_coord_attrs["units"] = "degrees_north"
data_obj.coords[self.y_dim].attrs = y_coord_attrs
data_obj.coords[self.x_dim].attrs = x_coord_attrs
return data_obj
def set_attrs(
self, new_attrs: dict, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Set the attributes of the dataset/dataarray and reset
rioxarray properties to re-search for them.
Parameters
----------
new_attrs: dict
A dictionary of new attributes.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with new attributes.
"""
data_obj = self._get_obj(inplace=inplace)
# set the attributes
data_obj.attrs = new_attrs
# reset rioxarray properties depending
# on attributes to be generated
data_obj.rio._nodata = None
data_obj.rio._crs = None
return data_obj
def update_attrs(
self, new_attrs: dict, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Update the attributes of the dataset/dataarray and reset
rioxarray properties to re-search for them.
Parameters
----------
new_attrs: dict
A dictionary of new attributes to update with.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with updated attributes.
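Examples
--------
Illustrative usage; ``raster`` and the attribute value are assumptions:
>>> raster = raster.rio.update_attrs({"long_name": "elevation"})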
"""
data_attrs = dict(self._obj.attrs)
data_attrs.update(**new_attrs)
return self.set_attrs(data_attrs, inplace=inplace)
[docs]
def set_encoding(
self, new_encoding: dict, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Set the encoding of the dataset/dataarray and reset
rioxarray properties to re-search for them.
.. versionadded:: 0.4
Parameters
----------
new_encoding: dict
A dictionary for encoding.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with new encoding.
"""
data_obj = self._get_obj(inplace=inplace)
# set the encoding
data_obj.encoding = new_encoding
# reset rioxarray properties depending
# on attributes to be generated
data_obj.rio._nodata = None
data_obj.rio._crs = None
return data_obj
def update_encoding(
self, new_encoding: dict, inplace: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Update the encoding of the dataset/dataarray and reset
rioxarray properties to re-search for them.
.. versionadded:: 0.4
Parameters
----------
new_encoding: dict
A dictionary with encoding values to update with.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with updated encoding.
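Examples
--------
Illustrative usage; ``raster`` and the encoding value are assumptions:
>>> raster = raster.rio.update_encoding({"dtype": "int16"})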
"""
data_encoding = dict(self._obj.encoding)
data_encoding.update(**new_encoding)
return self.set_encoding(data_encoding, inplace=inplace)
def set_spatial_dims(
self, x_dim: str, y_dim: str, inplace: bool = True
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
This sets the spatial dimensions of the dataset.
Parameters
----------
x_dim: str
The name of the x dimension.
y_dim: str
The name of the y dimension.
inplace: bool, optional
If True, it will modify the dataset in place.
Otherwise it will return a modified copy.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Dataset with spatial dimensions set.
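Examples
--------
Illustrative usage; ``raster`` and the dimension names ``lon``/``lat`` are assumptions:
>>> raster = raster.rio.set_spatial_dims(x_dim="lon", y_dim="lat", inplace=False)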
"""
data_obj = self._get_obj(inplace=inplace)
if x_dim in data_obj.dims:
data_obj.rio._x_dim = x_dim
else:
raise MissingSpatialDimensionError(
f"x dimension ({x_dim}) not found.{_get_data_var_message(data_obj)}"
)
if y_dim in data_obj.dims:
data_obj.rio._y_dim = y_dim
else:
raise MissingSpatialDimensionError(
f"y dimension ({y_dim}) not found.{_get_data_var_message(data_obj)}"
)
return data_obj
@property
def x_dim(self) -> Hashable:
"""Hashable: The dimension for the X-axis."""
if self._x_dim is not None:
return self._x_dim
raise MissingSpatialDimensionError(
"x dimension not found. 'rio.set_spatial_dims()' or "
"using 'rename()' to change the dimension name to 'x' can address this."
f"{_get_data_var_message(self._obj)}"
)
@property
def y_dim(self) -> Hashable:
"""Hashable: The dimension for the Y-axis."""
if self._y_dim is not None:
return self._y_dim
raise MissingSpatialDimensionError(
"y dimension not found. 'rio.set_spatial_dims()' or "
"using 'rename()' to change the dimension name to 'y' can address this."
f"{_get_data_var_message(self._obj)}"
)
@property
def width(self) -> int:
"""int: Returns the width of the dataset (x dimension size)"""
if self._width is not None:
return self._width
self._width = self._obj[self.x_dim].size
return self._width
@property
def height(self) -> int:
"""int: Returns the height of the dataset (y dimension size)"""
if self._height is not None:
return self._height
self._height = self._obj[self.y_dim].size
return self._height
@property
def shape(self) -> tuple[int, int]:
"""tuple(int, int): Returns the shape (height, width)"""
return (self.height, self.width)
def _check_dimensions(self) -> Optional[str]:
"""
This function validates that the dimensions are 2D/3D and
that they are in the proper order.
Returns
-------
str or None: Name of the extra dimension.
"""
extra_dims = tuple(set(list(self._obj.dims)) - {self.x_dim, self.y_dim})
if len(extra_dims) > 1:
raise TooManyDimensions(
"Only 2D and 3D data arrays supported."
f"{_get_data_var_message(self._obj)}"
)
if extra_dims and self._obj.dims != (extra_dims[0], self.y_dim, self.x_dim):
dim_info: tuple = (extra_dims[0], self.y_dim, self.x_dim)
raise InvalidDimensionOrder(
f"Invalid dimension order. Expected order: {dim_info}. "
f"You can use `DataArray.transpose{dim_info}`"
" to reorder your dimensions."
f"{_get_data_var_message(self._obj)}"
)
if not extra_dims and self._obj.dims != (self.y_dim, self.x_dim):
dim_info = (self.y_dim, self.x_dim)
raise InvalidDimensionOrder(
f"Invalid dimension order. Expected order: {dim_info}. "
f"You can use `DataArray.transpose{dim_info}`"
" to reorder your dimensions."
f"{_get_data_var_message(self._obj)}"
)
return str(extra_dims[0]) if extra_dims else None
@property
def count(self) -> int:
"""int: Returns the band count (z dimension size)"""
if self._count is not None:
return self._count
extra_dim = self._check_dimensions()
self._count = 1
if extra_dim is not None:
self._count = self._obj[extra_dim].size
return self._count
def _internal_bounds(self) -> tuple[float, float, float, float]:
"""Determine the internal bounds of the `xarray.DataArray`"""
if self.x_dim not in self._obj.coords:
raise DimensionMissingCoordinateError(f"{self.x_dim} missing coordinates.")
if self.y_dim not in self._obj.coords:
raise DimensionMissingCoordinateError(f"{self.y_dim} missing coordinates.")
try:
left = float(self._obj[self.x_dim][0])
right = float(self._obj[self.x_dim][-1])
top = float(self._obj[self.y_dim][0])
bottom = float(self._obj[self.y_dim][-1])
except IndexError:
raise NoDataInBounds(
"Unable to determine bounds from coordinates."
f"{_get_data_var_message(self._obj)}"
) from None
return left, bottom, right, top
def resolution(self, recalc: bool = False) -> tuple[float, float]:
"""
Determine the resolution of the grid.
If the transformation has rotation, the sign of the resolution is lost.
Parameters
----------
recalc: bool, optional
Will force the resolution to be recalculated instead of using the
transform attribute.
Returns
-------
x_resolution, y_resolution: float
The resolution of the `xarray.DataArray` | `xarray.Dataset`
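Examples
--------
Illustrative usage; ``raster`` is an assumed georeferenced object:
>>> x_resolution, y_resolution = raster.rio.resolution()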
"""
transform = self._cached_transform()
if (
not recalc or self.width == 1 or self.height == 1
) and transform is not None:
return _resolution(transform)
# if the coordinates of the spatial dimensions are missing
# use the cached transform resolution
try:
left, bottom, right, top = self._internal_bounds()
except DimensionMissingCoordinateError:
if transform is None:
raise
return _resolution(transform)
if self.width == 1 or self.height == 1:
raise OneDimensionalRaster(
"Only 1 dimensional array found. Cannot calculate the resolution."
f"{_get_data_var_message(self._obj)}"
)
resolution_x = (right - left) / (self.width - 1)
resolution_y = (bottom - top) / (self.height - 1)
return resolution_x, resolution_y
def _unordered_bounds(
self, recalc: bool = False
) -> tuple[float, float, float, float]:
"""
Unordered bounds.
Parameters
----------
recalc: bool, optional
Will force the bounds to be recalculated instead of using the
transform attribute.
Returns
-------
left, bottom, right, top: float
Outermost coordinates of the `xarray.DataArray` | `xarray.Dataset`.
"""
resolution_x, resolution_y = self.resolution(recalc=recalc)
try:
# attempt to get bounds from xarray coordinate values
left, bottom, right, top = self._internal_bounds()
left -= resolution_x / 2.0
right += resolution_x / 2.0
top -= resolution_y / 2.0
bottom += resolution_y / 2.0
except DimensionMissingCoordinateError as error:
transform = self._cached_transform()
if not transform:
raise RioXarrayError("Transform not able to be determined.") from error
left = transform.c
top = transform.f
right = left + resolution_x * self.width
bottom = top + resolution_y * self.height
return left, bottom, right, top
def bounds(self, *, recalc: bool = False) -> tuple[float, float, float, float]:
"""
Determine the bounds of the `xarray.DataArray` | `xarray.Dataset`.
Parameters
----------
recalc: bool, optional
Will force the bounds to be recalculated instead of using the
transform attribute.
Returns
-------
left, bottom, right, top: float
Outermost coordinates of the `xarray.DataArray` | `xarray.Dataset`.
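Examples
--------
Illustrative usage; ``raster`` is an assumed georeferenced object:
>>> minx, miny, maxx, maxy = raster.rio.bounds()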
"""
minx, miny, maxx, maxy = self._unordered_bounds(recalc=recalc)
resolution_x, resolution_y = self.resolution(recalc=recalc)
return _order_bounds(
minx=minx,
miny=miny,
maxx=maxx,
maxy=maxy,
resolution_x=resolution_x,
resolution_y=resolution_y,
)
def isel_window(
self, window: rasterio.windows.Window, *, pad: bool = False
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Use a rasterio.windows.Window to select a subset of the data.
.. versionadded:: 0.6.0 pad
.. warning:: Float indices are converted to integers.
Parameters
----------
window: :class:`rasterio.windows.Window`
The window of the dataset to read.
pad: bool, default=False
Set to True to expand the returned DataArray to the dimensions of the window.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
The data in the window.
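Examples
--------
Illustrative usage; ``raster`` and the window size are assumptions:
>>> from rasterio.windows import Window
>>> subset = raster.rio.isel_window(Window(col_off=0, row_off=0, width=256, height=256))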
"""
(row_start, row_stop), (col_start, col_stop) = window.toranges()
row_start = 0 if row_start < 0 else math.floor(row_start)
row_stop = 0 if row_stop < 0 else math.ceil(row_stop)
col_start = 0 if col_start < 0 else math.floor(col_start)
col_stop = 0 if col_stop < 0 else math.ceil(col_stop)
row_slice = slice(int(row_start), int(row_stop))
col_slice = slice(int(col_start), int(col_stop))
array_subset = (
self._obj.isel({self.y_dim: row_slice, self.x_dim: col_slice})
.copy() # this is to prevent sharing coordinates with the original dataset
.rio.set_spatial_dims(x_dim=self.x_dim, y_dim=self.y_dim, inplace=True)
.rio.write_transform(
transform=rasterio.windows.transform(
rasterio.windows.Window.from_slices(
rows=row_slice,
cols=col_slice,
width=self.width,
height=self.height,
),
self.transform(recalc=True),
),
inplace=True,
)
)
if pad:
return array_subset.rio.pad_box(
*rasterio.windows.bounds(window, self.transform(recalc=True))
)
return array_subset
def slice_xy(
self,
minx: float,
miny: float,
maxx: float,
maxy: float,
) -> Union[xarray.Dataset, xarray.DataArray]:
"""Slice the array by x,y bounds.
Parameters
----------
minx: float
Minimum bound for x coordinate.
miny: float
Minimum bound for y coordinate.
maxx: float
Maximum bound for x coordinate.
maxy: float
Maximum bound for y coordinate.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
The data in the slice.
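Examples
--------
Illustrative usage; ``raster`` and the bounds are assumptions
(values must be in the coordinate system of the data):
>>> subset = raster.rio.slice_xy(minx=-120.0, miny=35.0, maxx=-119.0, maxy=36.0)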
"""
left, bottom, right, top = self._internal_bounds()
if top > bottom:
y_slice = slice(maxy, miny)
else:
y_slice = slice(miny, maxy)
if left > right:
x_slice = slice(maxx, minx)
else:
x_slice = slice(minx, maxx)
subset = (
self._obj.sel({self.x_dim: x_slice, self.y_dim: y_slice})
.copy() # this is to prevent sharing coordinates with the original dataset
.rio.set_spatial_dims(x_dim=self.x_dim, y_dim=self.y_dim, inplace=True)
.rio.write_transform(inplace=True)
)
return subset
def write_gcps(
self,
gcps: Iterable[GroundControlPoint],
gcp_crs: Any,
*,
grid_mapping_name: Optional[str] = None,
inplace: bool = False,
) -> Union[xarray.Dataset, xarray.DataArray]:
"""
Write the GroundControlPoints to the dataset.
https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
Parameters
----------
gcps: list of :obj:`rasterio.control.GroundControlPoint`
The Ground Control Points to integrate into the dataset.
gcp_crs: str, :obj:`rasterio.crs.CRS`, or dict
Coordinate reference system for the GCPs.
grid_mapping_name: str, optional
Name of the grid_mapping coordinate to store the GCPs information in.
Default is the grid_mapping name of the dataset.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with Ground Control Points written.
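Examples
--------
Illustrative usage with hypothetical control points; ``raster`` is an assumed object:
>>> from rasterio.control import GroundControlPoint
>>> gcps = [GroundControlPoint(row=0, col=0, x=-120.0, y=36.0, z=0.0)]  # hypothetical values
>>> raster = raster.rio.write_gcps(gcps, "EPSG:4326")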
"""
grid_mapping_name = (
self.grid_mapping if grid_mapping_name is None else grid_mapping_name
)
data_obj = self._get_obj(inplace=True)
if gcp_crs:
data_obj = data_obj.rio.write_crs(
gcp_crs, grid_mapping_name=grid_mapping_name, inplace=inplace
)
try:
grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
except KeyError:
data_obj.coords[grid_mapping_name] = xarray.Variable((), 0)
grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
geojson_gcps = _convert_gcps_to_geojson(gcps)
grid_map_attrs["gcps"] = json.dumps(geojson_gcps)
data_obj.coords[grid_mapping_name].rio.set_attrs(grid_map_attrs, inplace=True)
self._gcps = list(gcps)
return data_obj
def get_gcps(self) -> Optional[list[GroundControlPoint]]:
"""
Get the GroundControlPoints from the dataset.
https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
Returns
-------
list of :obj:`rasterio.control.GroundControlPoint` or None
The Ground Control Points from the dataset or None if not applicable
"""
if self._gcps is not None:
return self._gcps
try:
geojson_gcps = json.loads(self._obj.coords[self.grid_mapping].attrs["gcps"])
except (KeyError, AttributeError):
return None
def _parse_gcp(gcp) -> GroundControlPoint:
x, y, *z = gcp["geometry"]["coordinates"]
z = z[0] if z else None
return GroundControlPoint(
x=x,
y=y,
z=z,
row=gcp["properties"]["row"],
col=gcp["properties"]["col"],
id=gcp["properties"]["id"],
info=gcp["properties"]["info"],
)
self._gcps = [_parse_gcp(gcp) for gcp in geojson_gcps["features"]]
return self._gcps
def write_rpcs(
self,
rpcs: RPC,
*,
grid_mapping_name: Optional[str] = None,
inplace: bool = False,
) -> xarray.Dataset | xarray.DataArray:
"""
Write the Rational Polynomial Coefficients to the dataset.
https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#rational-polynomial-coefficients
Parameters
----------
rpcs: :obj:`rasterio.rpc.RPC`
The Rational Polynomial Coefficients to integrate into the dataset.
grid_mapping_name: str, optional
Name of the grid_mapping coordinate to store the RPCs information in.
Default is the grid_mapping name of the dataset.
inplace: bool, optional
If True, it will write to the existing dataset. Default is False.
Returns
-------
:obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
Modified dataset with Rational Polynomial Coefficients written.
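Examples
--------
Illustrative usage; ``source.tif`` is a hypothetical file containing RPCs:
>>> import rasterio
>>> with rasterio.open("source.tif") as src:  # hypothetical path
...     rpcs = src.rpcs
>>> raster = raster.rio.write_rpcs(rpcs)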
"""
grid_mapping_name = (
self.grid_mapping if grid_mapping_name is None else grid_mapping_name
)
data_obj = self._get_obj(inplace=True)
# RPC CRS is always 4326
data_obj = data_obj.rio.write_crs(
"epsg:4326", grid_mapping_name=grid_mapping_name, inplace=inplace
)
try:
grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
except KeyError:
data_obj.coords[grid_mapping_name] = xarray.Variable((), 0)
grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
# Store the RPCs
grid_map_attrs["rpcs"] = json.dumps(rpcs.to_dict())
data_obj.coords[grid_mapping_name].rio.set_attrs(grid_map_attrs, inplace=True)
self._rpcs = rpcs
return data_obj
def get_rpcs(self) -> Optional[RPC]:
"""
Get the Rational Polynomial Coefficients from the dataset.
https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#rational-polynomial-coefficients
Returns
-------
:obj:`rasterio.rpc.RPC` or None
The Rational Polynomial Coefficients from the dataset or None if not applicable
"""
if self._rpcs is not None:
return self._rpcs
try:
json_rpcs = json.loads(self._obj.coords[self.grid_mapping].attrs["rpcs"])
except (KeyError, AttributeError):
return None
self._rpcs = RPC(**json_rpcs)
return self._rpcs