diff --git a/latest/.doctrees/environment.pickle b/latest/.doctrees/environment.pickle
index ea97e7f7..c39038c6 100644
Binary files a/latest/.doctrees/environment.pickle and b/latest/.doctrees/environment.pickle differ
diff --git a/latest/_modules/rioxarray/_io.html b/latest/_modules/rioxarray/_io.html
index 5e777cf3..286e603b 100644
--- a/latest/_modules/rioxarray/_io.html
+++ b/latest/_modules/rioxarray/_io.html
@@ -1036,8 +1036,12 @@
             variables.pop_to(
                 result.attrs, result.encoding, "scale_factor", name=da_name
             )
+        if "scales" in result.attrs:
+            variables.pop_to(result.attrs, result.encoding, "scales", name=da_name)
         if "add_offset" in result.attrs:
             variables.pop_to(result.attrs, result.encoding, "add_offset", name=da_name)
+        if "offsets" in result.attrs:
+            variables.pop_to(result.attrs, result.encoding, "offsets", name=da_name)
     if masked:
         if "_FillValue" in result.attrs:
             variables.pop_to(result.attrs, result.encoding, "_FillValue", name=da_name)
diff --git a/latest/_modules/rioxarray/merge.html b/latest/_modules/rioxarray/merge.html
index aca8db3a..8d5b0141 100644
--- a/latest/_modules/rioxarray/merge.html
+++ b/latest/_modules/rioxarray/merge.html
@@ -121,12 +121,16 @@ Source code for rioxarray.merge
"crs": self.crs,
"nodata": self.nodatavals[0],
}
- self._scale_factor = self._xds.encoding.get("scale_factor", 1.0)
- self._add_offset = self._xds.encoding.get("add_offset", 0.0)
+ valid_scale_factor = self._xds.encoding.get("scale_factor", 1) != 1 or any(
+ scale != 1 for scale in self._xds.encoding.get("scales", (1,))
+ )
+ valid_offset = self._xds.encoding.get("add_offset", 0.0) != 0 or any(
+ offset != 0 for offset in self._xds.encoding.get("offsets", (0,))
+ )
self._mask_and_scale = (
self._xds.rio.encoded_nodata is not None
- or self._scale_factor != 1
- or self._add_offset != 0
+ or valid_scale_factor
+ or valid_offset
or self._xds.encoding.get("_Unsigned") is not None
)
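Note on the hunk above: merging only needs the mask-and-scale path when some band carries a non-trivial scale or offset, whether stored as a single "scale_factor"/"add_offset" or as per-band "scales"/"offsets" tuples. A standalone sketch of that check (the encoding dict here is illustrative):

encoding = {"scales": (1.0, 0.01), "offsets": (0.0, 0.0)}

valid_scale_factor = encoding.get("scale_factor", 1) != 1 or any(
    scale != 1 for scale in encoding.get("scales", (1,))
)
valid_offset = encoding.get("add_offset", 0.0) != 0 or any(
    offset != 0 for offset in encoding.get("offsets", (0,))
)
print(valid_scale_factor, valid_offset)  # True False -> the mask-and-scale path is taken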
@@ -149,10 +153,9 @@ Source code for rioxarray.merge
kwargs["masked"] = True
out = dataset.read(*args, **kwargs)
if self._mask_and_scale:
- if self._scale_factor != 1:
- out = out * self._scale_factor
- if self._add_offset != 0:
- out = out + self._add_offset
+ out = out.astype(self._xds.dtype)
+ for iii in range(self.count):
+ out[iii] = out[iii] * dataset.scales[iii] + dataset.offsets[iii]
return out
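Note on the hunk above: instead of applying a single dataset-wide scale_factor/add_offset, the merged window is now decoded band by band from the rasterio dataset's scales/offsets. A NumPy-only sketch of that loop (array shape and the scale/offset values are made up):

import numpy

raw = numpy.ones((2, 3, 4), dtype="int16")  # (band, y, x), as returned by dataset.read
scales = (0.01, 0.02)                       # stand-ins for dataset.scales
offsets = (10.0, 0.0)                       # stand-ins for dataset.offsets

out = raw.astype("float64")                 # target dtype of the decoded array
for band in range(out.shape[0]):
    # apply the band-specific scale and offset
    out[band] = out[band] * scales[band] + offsets[band]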
diff --git a/latest/_modules/rioxarray/raster_array.html b/latest/_modules/rioxarray/raster_array.html
index bcd2cfee..21ca1c0e 100644
--- a/latest/_modules/rioxarray/raster_array.html
+++ b/latest/_modules/rioxarray/raster_array.html
@@ -88,6 +88,7 @@ Source code for rioxarray.raster_array
 - https://github.com/opendatacube/datacube-core/blob/1d345f08a10a13c316f81100936b0ad8b1a374eb/LICENSE # noqa: E501
 """
+
 import copy
 import os
 from collections.abc import Hashable, Iterable, Mapping
@@ -207,7 +208,13 @@ Source code for rioxarray.raster_array
     **kwargs,
 ):
     """Determine the affine of the new projected `xarray.DataArray`"""
-    src_bounds = () if "gcps" in kwargs else src_data_array.rio.bounds()
+    src_bounds = ()
+    if (
+        "gcps" not in kwargs
+        and "rpcs" not in kwargs
+        and "src_geoloc_array" not in kwargs
+    ):
+        src_bounds = src_data_array.rio.bounds()
     src_height, src_width = src_data_array.rio.shape
     dst_height, dst_width = dst_shape if dst_shape is not None else (None, None)
     # pylint: disable=isinstance-second-argument-not-valid-type
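Note on the hunk above: source bounds only make sense when the source is georeferenced by an affine transform; with GCPs, RPCs, or a geolocation array in the reproject kwargs, rasterio derives the extent itself. A tiny sketch of the same predicate:

def needs_src_bounds(reproject_kwargs: dict) -> bool:
    # Only compute source bounds when no alternative georeferencing is supplied.
    return not any(
        key in reproject_kwargs for key in ("gcps", "rpcs", "src_geoloc_array")
    )


print(needs_src_bounds({}))                  # True
print(needs_src_bounds({"rpcs": object()}))  # False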
@@ -542,8 +549,12 @@ Source code for rioxarray.raster_array
         if gcps:
             kwargs.setdefault("gcps", gcps)
 
-        gcps_or_rpcs = "gcps" in kwargs or "rpcs" in kwargs
-        src_affine = None if gcps_or_rpcs else self.transform(recalc=True)
+        use_affine = (
+            "gcps" not in kwargs
+            and "rpcs" not in kwargs
+            and "src_geoloc_array" not in kwargs
+        )
+        src_affine = None if not use_affine else self.transform(recalc=True)
         if transform is None:
             dst_affine, dst_width, dst_height = _make_dst_affine(
                 src_data_array=self._obj,
@@ -563,7 +574,6 @@ Source code for rioxarray.raster_array
         dst_data = self._create_dst_data(dst_height=dst_height, dst_width=dst_width)
 
         dst_nodata = self._get_dst_nodata(nodata)
-
         rasterio.warp.reproject(
             source=self._obj.values,
             destination=dst_data,
@@ -595,7 +605,7 @@ Source code for rioxarray.raster_array
                 dst_affine=dst_affine,
                 dst_width=dst_width,
                 dst_height=dst_height,
-                force_generate=gcps_or_rpcs,
+                force_generate=not use_affine,
             ),
             dims=tuple(dst_dims),
             attrs=new_attrs,
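Taken together, the raster_array hunks above let reproject() also accept a src_geoloc_array (in addition to gcps/rpcs) and skip the affine-based source transform in that case. A hedged usage sketch; the file name, variable names, and CRS are placeholders, and passing src_geoloc_array through requires a rasterio version whose warp.reproject supports it:

import numpy
import rioxarray  # noqa: F401  # registers the .rio accessor
import xarray

swath = xarray.open_dataset("sensor_swath.nc")  # hypothetical swath file
reprojected = swath["data"].rio.reproject(
    dst_crs="EPSG:4326",
    # 2D per-pixel coordinate arrays; order assumed (x, y) = (lon, lat)
    src_geoloc_array=numpy.stack(
        [swath["lon"].values, swath["lat"].values]
    ),
)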
diff --git a/latest/_modules/rioxarray/raster_dataset.html b/latest/_modules/rioxarray/raster_dataset.html
index e0f4d932..2e3a2712 100644
--- a/latest/_modules/rioxarray/raster_dataset.html
+++ b/latest/_modules/rioxarray/raster_dataset.html
@@ -610,35 +610,58 @@ Source code for rioxarray.raster_dataset
         is True. Otherwise None is returned.
         """
+        # pylint: disable=too-many-locals
         variable_dim = f"band_{uuid4()}"
         data_array = self._obj.to_array(dim=variable_dim)
         # ensure raster metadata preserved
-        scales = []
-        offsets = []
-        nodatavals = []
+        attr_scales = []
+        attr_offsets = []
+        attr_nodatavals = []
+        encoded_scales = []
+        encoded_offsets = []
+        encoded_nodatavals = []
         band_tags = []
         long_name = []
         for data_var in data_array[variable_dim].values:
-            scales.append(self._obj[data_var].attrs.get("scale_factor", 1.0))
-            offsets.append(self._obj[data_var].attrs.get("add_offset", 0.0))
+            try:
+                encoded_scales.append(self._obj[data_var].encoding["scale_factor"])
+            except KeyError:
+                attr_scales.append(self._obj[data_var].attrs.get("scale_factor", 1.0))
+            try:
+                encoded_offsets.append(self._obj[data_var].encoding["add_offset"])
+            except KeyError:
+                attr_offsets.append(self._obj[data_var].attrs.get("add_offset", 0.0))
             long_name.append(self._obj[data_var].attrs.get("long_name", data_var))
-            nodatavals.append(self._obj[data_var].rio.nodata)
+            if self._obj[data_var].rio.encoded_nodata is not None:
+                encoded_nodatavals.append(self._obj[data_var].rio.encoded_nodata)
+            else:
+                attr_nodatavals.append(self._obj[data_var].rio.nodata)
             band_tags.append(self._obj[data_var].attrs.copy())
-        data_array.attrs["scales"] = scales
-        data_array.attrs["offsets"] = offsets
+        if encoded_scales:
+            data_array.encoding["scales"] = encoded_scales
+        else:
+            data_array.attrs["scales"] = attr_scales
+        if encoded_offsets:
+            data_array.encoding["offsets"] = encoded_offsets
+        else:
+            data_array.attrs["offsets"] = attr_offsets
         data_array.attrs["band_tags"] = band_tags
         data_array.attrs["long_name"] = long_name
+        use_encoded_nodatavals = bool(encoded_nodatavals)
+        nodatavals = encoded_nodatavals if use_encoded_nodatavals else attr_nodatavals
         nodata = nodatavals[0]
         if (
             all(nodataval == nodata for nodataval in nodatavals)
             or numpy.isnan(nodatavals).all()
         ):
-            data_array.rio.write_nodata(nodata, inplace=True)
+            data_array.rio.write_nodata(
+                nodata, inplace=True, encoded=use_encoded_nodatavals
+            )
         else:
             raise RioXarrayError(
                 "All nodata values must be the same when exporting to raster. "
-                f"Current values: {nodatavals}"
+                f"Current values: {attr_nodatavals}"
             )
         if self.crs is not None:
             data_array.rio.write_crs(self.crs, inplace=True)
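Note on the raster_dataset hunk above: when the per-variable scale/offset/nodata live in .encoding (i.e. the data was already decoded on read), they are collected separately and written back as encoded metadata, with the attrs-based values used only as a fallback. A minimal sketch of that preference with plain xarray objects (variable names and values are illustrative):

import xarray

green = xarray.DataArray([[1.0]], dims=("y", "x"), name="green")
green.encoding["scale_factor"] = 0.0001
nir = xarray.DataArray([[2.0]], dims=("y", "x"), name="nir")
nir.encoding["scale_factor"] = 0.0001

encoded_scales, attr_scales = [], []
for band in (green, nir):
    try:
        # prefer the encoded (already decoded) value ...
        encoded_scales.append(band.encoding["scale_factor"])
    except KeyError:
        # ... and fall back to the raw attribute
        attr_scales.append(band.attrs.get("scale_factor", 1.0))

scales = encoded_scales or attr_scales
print(scales)  # [0.0001, 0.0001]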