From bf772956cbf9fca70076b069ff8ad45e04c69b30 Mon Sep 17 00:00:00 2001
From: lbferreira
Date: Thu, 12 Dec 2024 08:28:52 -0600
Subject: [PATCH 1/7] Fix nodata retrieval logic to ensure compatibility with rioxarray

---
 odc/geo/_xr_interop.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/odc/geo/_xr_interop.py b/odc/geo/_xr_interop.py
index 2ca3cb0f..5005ef08 100644
--- a/odc/geo/_xr_interop.py
+++ b/odc/geo/_xr_interop.py
@@ -1024,11 +1024,11 @@ def nodata(self) -> Nodata:
         encoding = self._xx.encoding
 
         for k in ["nodata", "_FillValue"]:
-            nodata = attrs.get(k, ())
-            if nodata == ():
-                nodata = encoding.get(k, ())
+            nodata = attrs.get(k, numpy._NoValue)
+            if nodata is numpy._NoValue:
+                nodata = encoding.get(k, numpy._NoValue)
 
-            if nodata == ():
+            if nodata is numpy._NoValue:
                 continue
 
             if nodata is None:

From 62b7c5ebe41a3c7fc19f267daf06246e437ff830 Mon Sep 17 00:00:00 2001
From: Kirill Kouzoubov
Date: Mon, 16 Dec 2024 10:13:19 +1100
Subject: [PATCH 2/7] Fix warnings about numpy._NoValue

---
 odc/geo/_xr_interop.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/odc/geo/_xr_interop.py b/odc/geo/_xr_interop.py
index 5005ef08..4be83c07 100644
--- a/odc/geo/_xr_interop.py
+++ b/odc/geo/_xr_interop.py
@@ -50,7 +50,7 @@
 )
 from .overlap import compute_output_geobox
 from .roi import roi_is_empty
-from .types import Nodata, Resolution, SomeNodata, SomeResolution, SomeShape, xy_
+from .types import Nodata, Resolution, SomeNodata, SomeResolution, SomeShape, Unset, xy_
 
 # pylint: disable=import-outside-toplevel
 # pylint: disable=too-many-lines
@@ -80,6 +80,8 @@
     ("lat", "lon"),
 ]
 
+_NoValue = Unset()
+
 
 @dataclass
 class GeoState:
@@ -1024,11 +1026,11 @@ def nodata(self) -> Nodata:
         encoding = self._xx.encoding
 
         for k in ["nodata", "_FillValue"]:
-            nodata = attrs.get(k, numpy._NoValue)
-            if nodata is numpy._NoValue:
-                nodata = encoding.get(k, numpy._NoValue)
+            nodata = attrs.get(k, _NoValue)
+            if nodata is _NoValue:
+                nodata = encoding.get(k, _NoValue)
 
-            if nodata is numpy._NoValue:
+            if nodata is _NoValue:
                 continue
 
             if nodata is None:

From 0b1a47c3d6ed02cedffd5d4e2c972bff02589c79 Mon Sep 17 00:00:00 2001
From: wietzesuijker
Date: Mon, 16 Dec 2024 21:13:20 +0000
Subject: [PATCH 3/7] Chore: use built in types

---
 odc/geo/cog/_mpu.py      | 23 +++++++--------
 odc/geo/cog/_s3.py       | 35 ++++++++++++-----------
 odc/geo/cog/_tifffile.py | 60 +++++++++++++++++++++-------------------
 3 files changed, 61 insertions(+), 57 deletions(-)

diff --git a/odc/geo/cog/_mpu.py b/odc/geo/cog/_mpu.py
index 5832c139..f1776d8d 100644
--- a/odc/geo/cog/_mpu.py
+++ b/odc/geo/cog/_mpu.py
@@ -8,13 +8,10 @@
 from typing import (
     TYPE_CHECKING,
     Any,
-    Dict,
     Iterable,
     Iterator,
-    List,
     Optional,
     Protocol,
-    Tuple,
     Union,
 )
 
@@ -35,9 +32,9 @@ class PartsWriter(Protocol):
     """Protocol for labeled parts data writer."""
 
-    def __call__(self, part: int, data: SomeData) -> Dict[str, Any]: ...
+    def __call__(self, part: int, data: SomeData) -> dict[str, Any]: ...
 
-    def finalise(self, parts: List[Dict[str, Any]]) -> Any: ...
+    def finalise(self, parts: list[dict[str, Any]]) -> Any: ...
 
     @property
     def min_write_sz(self) -> int: ...
 
@@ -76,8 +73,8 @@ def __init__(
         write_credits: int,
         data: Optional[bytearray] = None,
         left_data: Optional[bytearray] = None,
-        parts: Optional[List[Dict[str, Any]]] = None,
-        observed: Optional[List[Tuple[int, Any]]] = None,
+        parts: Optional[list[dict[str, Any]]] = None,
+        observed: Optional[list[tuple[int, Any]]] = None,
         is_final: bool = False,
         lhs_keep: int = 0,
     ) -> None:
@@ -85,8 +82,8 @@ def __init__(
         self.write_credits = write_credits
         self.data = bytearray() if data is None else data
         self.left_data = bytearray() if left_data is None else left_data
-        self.parts: List[Dict[str, Any]] = [] if parts is None else parts
-        self.observed: List[Tuple[int, Any]] = [] if observed is None else observed
+        self.parts: list[dict[str, Any]] = [] if parts is None else parts
+        self.observed: list[tuple[int, Any]] = [] if observed is None else observed
         self.is_final = is_final
         self.lhs_keep = lhs_keep
         # if supplying data must also supply observed
@@ -221,7 +218,7 @@ def flush(
         write: PartsWriter,
         leftPartId: Optional[int] = None,
         finalise: bool = True,
-    ) -> Tuple[int, Any]:
+    ) -> tuple[int, Any]:
         rr = None
         if not self.started_write:
             assert not self.left_data
@@ -343,7 +340,7 @@ def from_dask_bag(
 
     @staticmethod
     def collate_substreams(
-        substreams: List["dask.bag.Item"],
+        substreams: list["dask.bag.Item"],
         *,
         write: Optional[PartsWriter] = None,
         spill_sz: int = 0,
@@ -423,7 +420,7 @@ def mpu_write(
 
 
 def _mpu_collate_op(
-    substreams: List[MPUChunk],
+    substreams: list[MPUChunk],
     *,
     write: Optional[PartsWriter] = None,
     spill_sz: int = 0,
@@ -439,7 +436,7 @@ def _mpu_collate_op(
 
 def _mpu_append_chunks_op(
     mpus: Iterable[MPUChunk],
-    chunks: Iterable[Tuple[bytes, Any]],
+    chunks: Iterable[tuple[bytes, Any]],
     write: Optional[PartsWriter] = None,
     spill_sz: int = 0,
 ):
diff --git a/odc/geo/cog/_s3.py b/odc/geo/cog/_s3.py
index b2b3a198..b88d44c3 100644
--- a/odc/geo/cog/_s3.py
+++ b/odc/geo/cog/_s3.py
@@ -5,7 +5,7 @@
 from __future__ import annotations
 
 from threading import Lock
-from typing import TYPE_CHECKING, Any, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Optional
 
 from cachetools import cached
 
@@ -38,7 +38,7 @@ def _dask_client() -> "distributed.Client" | None:
     return None
 
 
-def s3_parse_url(url: str) -> Tuple[str, str]:
+def s3_parse_url(url: str) -> tuple[str, str]:
     if url.startswith("s3://"):
         bucket, *key = url[5:].split("/", 1)
         key = key[0] if len(key) else ""
@@ -92,6 +92,7 @@ def __init__(
 
     @cached({})
     def s3_client(self):
+        """Return the S3 client."""
         # pylint: disable=import-outside-toplevel,import-error
         from botocore.session import Session
 
@@ -108,15 +109,15 @@ def s3_client(self):
         )
 
     def initiate(self, **kw) -> str:
+        """Initiate the S3 multipart upload."""
         assert self.uploadId == ""
         s3 = self.s3_client()
-
         rr = s3.create_multipart_upload(Bucket=self.bucket, Key=self.key, **kw)
-        uploadId = rr["UploadId"]
-        self.uploadId = uploadId
-        return uploadId
+        self.uploadId = rr["UploadId"]
+        return self.uploadId
 
     def write_part(self, part: int, data: SomeData) -> dict[str, Any]:
+        """Write a single part to S3."""
         s3 = self.s3_client()
         assert self.uploadId != ""
         rr = s3.upload_part(
@@ -126,31 +127,32 @@ def write_part(self, part: int, data: SomeData) -> dict[str, Any]:
             Key=self.key,
             UploadId=self.uploadId,
         )
-        etag = rr["ETag"]
-        return {"PartNumber": part, "ETag": etag}
+        return {"PartNumber": part, "ETag": rr["ETag"]}
 
     @property
     def url(self) -> str:
+        """Return the S3 URL of the object."""
         return f"s3://{self.bucket}/{self.key}"
 
     def finalise(self, parts: list[dict[str, Any]]) -> str:
+        """Finalise the multipart upload."""
         s3 = self.s3_client()
         assert self.uploadId
-
         rr = s3.complete_multipart_upload(
             Bucket=self.bucket,
             Key=self.key,
             UploadId=self.uploadId,
             MultipartUpload={"Parts": parts},
         )
-
         return rr["ETag"]
 
     @property
     def started(self) -> bool:
+        """Check if the multipart upload has been initiated."""
         return len(self.uploadId) > 0
 
     def cancel(self, other: str = ""):
+        """Cancel the multipart upload."""
         uploadId = other if other else self.uploadId
         if not uploadId:
             return
@@ -169,23 +171,23 @@ def cancel(self, other: str = ""):
         if uploadId == self.uploadId:
             self.uploadId = ""
 
-    def list_active(self):
+    def list_active(self) -> list[str]:
+        """List active multipart uploads."""
         s3 = self.s3_client()
         rr = s3.list_multipart_uploads(Bucket=self.bucket, Prefix=self.key)
         return [x["UploadId"] for x in rr.get("Uploads", [])]
 
     def read(self, **kw):
+        """Read the object directly from S3."""
         s3 = self.s3_client()
         return s3.get_object(Bucket=self.bucket, Key=self.key, **kw)["Body"].read()
 
     def __dask_tokenize__(self):
-        return (
-            self.bucket,
-            self.key,
-            self.uploadId,
-        )
+        """Dask-specific tokenization for S3 uploads."""
+        return (self.bucket, self.key, self.uploadId)
 
     def writer(self, kw, *, client: Any = None) -> PartsWriter:
+        """Return a Dask-compatible writer."""
         if client is None:
             client = _dask_client()
         writer = DelayedS3Writer(self, kw)
@@ -206,6 +208,7 @@ def upload(
         client: Any = None,
         **kw,
     ) -> "Delayed":
+        """Upload chunks to S3 with multipart uploads."""
         write = self.writer(kw, client=client) if spill_sz else None
         return mpu_write(
             chunks,
diff --git a/odc/geo/cog/_tifffile.py b/odc/geo/cog/_tifffile.py
index 60bd1fac..bbd9010c 100644
--- a/odc/geo/cog/_tifffile.py
+++ b/odc/geo/cog/_tifffile.py
@@ -10,19 +10,21 @@
 import itertools
 from functools import partial
 from io import BytesIO
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 from xml.sax.saxutils import escape as xml_escape
 
 import numpy as np
 import xarray as xr
 
+
 from .._interop import have
 from ..geobox import GeoBox
 from ..math import resolve_nodata
 from ..types import Shape2d, SomeNodata, Unset, shape_
+from ._az import MultiPartUpload as AzMultiPartUpload
 from ._mpu import mpu_write
 from ._mpu_fs import MPUFileSink
-from ._s3 import MultiPartUpload, s3_parse_url
+from ._s3 import MultiPartUpload as S3MultiPartUpload, s3_parse_url
 from ._shared import (
     GDAL_COMP,
     GEOTIFF_TAGS,
@@ -45,7 +47,7 @@ def _render_gdal_metadata(
     precision: int = 10,
     pad: int = 0,
     eol: str = "",
-    gdal_metadata_extra: Optional[List[str]] = None,
+    gdal_metadata_extra: Optional[list[str]] = None,
 ) -> str:
     def _item(sample: int, stats: dict[str, float]) -> str:
         return eol.join(
@@ -117,7 +119,7 @@ def _stats_from_layer(
 
 
 def _make_empty_cog(
-    shape: Tuple[int, ...],
+    shape: tuple[int, ...],
     dtype: Any,
     gbox: Optional[GeoBox] = None,
     *,
@@ -126,10 +128,10 @@ def _make_empty_cog(
     compression: Union[str, Unset] = Unset(),
     compressionargs: Any = None,
     predictor: Union[int, bool, Unset] = Unset(),
-    blocksize: Union[int, List[Union[int, Tuple[int, int]]]] = 2048,
+    blocksize: Union[int, list[Union[int, tuple[int, int]]]] = 2048,
     bigtiff: bool = True,
     **kw,
-) -> Tuple[CogMeta, memoryview]:
+) -> tuple[CogMeta, memoryview]:
     # pylint: disable=import-outside-toplevel,import-error
     have.check_or_error("tifffile", "rasterio", "xarray")
     from tifffile import (
@@ -184,7 +186,7 @@ def _make_empty_cog(
         **kw,
     }
 
-    def _sh(shape: Shape2d) -> Tuple[int, ...]:
+    def _sh(shape: Shape2d) -> tuple[int, ...]:
         if ax == "YX":
             return shape.shape
         if ax == "YXS":
@@ -194,7 +196,7 @@ def _sh(shape: Shape2d) -> tuple[int, ...]:
     tsz = norm_blocksize(blocksize[-1])
     im_shape, _, nlevels = compute_cog_spec(im_shape, tsz)
 
-    extratags: List[Tuple[int, int, int, Any]] = []
+    extratags: list[tuple[int, int, int, Any]] = []
     if gbox is not None:
         gbox = gbox.expand(im_shape)
         extratags, _ = geotiff_metadata(
@@ -205,7 +207,7 @@ def _sh(shape: Shape2d) -> tuple[int, ...]:
     _blocks = itertools.chain(iter(blocksize), itertools.repeat(blocksize[-1]))
 
     tw = TiffWriter(buf, bigtiff=bigtiff, shaped=False)
-    metas: List[CogMeta] = []
+    metas: list[CogMeta] = []
 
     for tsz, idx in zip(_blocks, range(nlevels + 1)):
         tile = norm_blocksize(tsz)
@@ -250,7 +252,7 @@ def _sh(shape: Shape2d) -> tuple[int, ...]:
 def _cog_block_compressor_yxs(
     block: np.ndarray,
     *,
-    tile_shape: Tuple[int, ...] = (),
+    tile_shape: tuple[int, ...] = (),
     encoder: Any = None,
     predictor: Any = None,
     fill_value: Union[float, int] = 0,
@@ -275,7 +277,7 @@ def _cog_block_compressor_yxs(
 def _cog_block_compressor_syx(
     block: np.ndarray,
     *,
-    tile_shape: Tuple[int, int] = (0, 0),
+    tile_shape: tuple[int, int] = (0, 0),
     encoder: Any = None,
     predictor: Any = None,
     fill_value: Union[float, int] = 0,
@@ -376,7 +378,7 @@ def _compress_tiles(
     if meta.axis == "SYX":
         src_ydim = 1
     if data.ndim == 2:
-        _chunks: Tuple[int, ...] = meta.tile.yx
+        _chunks: tuple[int, ...] = meta.tile.yx
     elif len(data.chunks[0]) == 1:
         # if 1 single chunk with all "samples", keep it that way
         _chunks = (data.shape[0], *meta.tile.yx)
@@ -433,7 +435,7 @@ def _pyramids_from_cog_metadata(
     xx: xr.DataArray,
     cog_meta: CogMeta,
     resampling: Union[str, int] = "nearest",
-) -> Tuple[xr.DataArray, ...]:
+) -> tuple[xr.DataArray, ...]:
     out = [xx]
 
     for mm in cog_meta.overviews:
@@ -447,9 +449,9 @@ def _pyramids_from_cog_metadata(
 
 
 def _extract_tile_info(
     meta: CogMeta,
-    tiles: List[Tuple[int, int, int, int, int]],
+    tiles: list[tuple[int, int, int, int, int]],
     start_offset: int = 0,
-) -> List[Tuple[List[int], List[int]]]:
+) -> list[tuple[list[int], list[int]]]:
     mm = meta.flatten()
     tile_info = [([0] * m.num_tiles, [0] * m.num_tiles) for m in mm]
@@ -468,11 +470,11 @@ def _extract_tile_info(
 
 
 def _patch_hdr(
-    tiles: List[Tuple[int, Tuple[int, int, int, int]]],
+    tiles: list[tuple[int, tuple[int, int, int, int]]],
     meta: CogMeta,
     hdr0: bytes,
     stats: Optional[list[dict[str, float]]] = None,
-    gdal_metadata_extra: Optional[List[str]] = None,
+    gdal_metadata_extra: Optional[list[str]] = None,
 ) -> bytes:
     # pylint: disable=import-outside-toplevel,import-error
     from tifffile import TiffFile, TiffPage
@@ -525,8 +527,8 @@ def _norm_compression_tifffile(
     compression: Union[str, Unset] = Unset(),
     compressionargs: Any = None,
     level: Optional[Union[int, float]] = None,
-    kw: Optional[Dict[str, Any]] = None,
-) -> Tuple[int, str, Dict[str, Any]]:
+    kw: Optional[dict[str, Any]] = None,
+) -> tuple[int, str, dict[str, Any]]:
     if kw is None:
         kw = {}
     if isinstance(compression, Unset):
@@ -593,7 +595,7 @@ def _gdal_sample_description(sample: int, description: str) -> str:
     return f'{double_escaped_description}'
 
 
-def _band_names(xx: xr.DataArray) -> List[str]:
+def _band_names(xx: xr.DataArray) -> list[str]:
     if "band" in xx.coords and xx.coords["band"].dtype.type is np.str_:
         return list(xx["band"].values)
     if "long_name" in xx.attrs:
@@ -602,7 +604,7 @@ def _band_names(xx: xr.DataArray) -> list[str]:
     return []
 
 
-def _gdal_sample_descriptions(descriptions: List[str]) -> List[str]:
+def _gdal_sample_descriptions(descriptions: list[str]) -> list[str]:
     """Convert band names to GDAL sample descriptions.
     :return: List of GDAL XML metadata lines to place in TIFF file.
     """
@@ -621,19 +623,20 @@ def save_cog_with_dask(
     compressionargs: Any = None,
     level: Optional[Union[int, float]] = None,
     predictor: Union[int, bool, Unset] = Unset(),
-    blocksize: Union[Unset, int, List[Union[int, Tuple[int, int]]]] = Unset(),
+    blocksize: Union[Unset, int, list[Union[int, tuple[int, int]]]] = Unset(),
     bigtiff: bool = True,
     overview_resampling: Union[int, str] = "nearest",
-    aws: Optional[Dict[str, Any]] = None,
+    aws: Optional[dict[str, Any]] = None,
+    azure: Optional[dict[str, Any]] = None,
     client: Any = None,
     stats: bool | int = True,
     **kw,
 ) -> Any:
     """
-    Save a Cloud Optimized GeoTIFF to S3 or file with Dask.
+    Save a Cloud Optimized GeoTIFF to S3, Azure Blob Storage, or file with Dask.
 
     :param xx: Pixels as :py:class:`xarray.DataArray` backed by Dask
-    :param dst: S3 url or a file path on shared storage
+    :param dst: S3, Azure URL, or file path
     :param compression: Compression to use, default is ``DEFLATE``
     :param level: Compression "level", depends on chosen compression
     :param predictor: TIFF predictor setting
@@ -711,7 +714,8 @@ def save_cog_with_dask(
             layers[stats].data, nodata=xx_odc.nodata, yaxis=xx_odc.ydim
         )
 
-    _tiles: List["dask.bag.Bag"] = []
+    # Prepare tiles
+    _tiles: list["dask.bag.Bag"] = []
     for scale_idx, (mm, img) in enumerate(zip(meta.flatten(), layers)):
         for sample_idx in range(meta.num_planes):
             tt = _compress_tiles(img, mm, scale_idx=scale_idx, sample_idx=sample_idx)
@@ -778,7 +782,7 @@ def geotiff_metadata(
     geobox: GeoBox,
     nodata: SomeNodata = "auto",
    gdal_metadata: Optional[str] = None,
-) -> Tuple[List[Tuple[int, int, int, Any]], Dict[str, Any]]:
+) -> tuple[list[tuple[int, int, int, Any]], dict[str, Any]]:
     """
     Convert GeoBox to geotiff tags and metadata for :py:mod:`tifffile`.
 
@@ -815,7 +819,7 @@ def _dtype_as_int(dtype) -> int:
             return dtype
         return dtype.value
 
-    geo_tags: List[Tuple[int, int, int, Any]] = [
+    geo_tags: list[tuple[int, int, int, Any]] = [
         (t.code, _dtype_as_int(t.dtype), t.count, t.value)
         for t in tf.pages.first.tags.values()
         if t.code in GEOTIFF_TAGS

From 002e8365eda8992333bcfe21db3cfd8b53347213 Mon Sep 17 00:00:00 2001
From: wietzesuijker
Date: Mon, 16 Dec 2024 21:23:33 +0000
Subject: [PATCH 4/7] Chore: spelling ize -> ise

---
 odc/geo/cog/_s3.py       |  4 ++--
 odc/geo/cog/_tifffile.py | 16 ++++++++--------
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/odc/geo/cog/_s3.py b/odc/geo/cog/_s3.py
index b88d44c3..6da5cf85 100644
--- a/odc/geo/cog/_s3.py
+++ b/odc/geo/cog/_s3.py
@@ -282,7 +282,7 @@ def _ensure_init(self, final_write: bool = False) -> MultiPartUpload:
 
         uploadId = _safe_get(shared_state, 0.1)
         if uploadId is not None:
-            # someone else initialized it
+            # someone else initialised it
             mpu.uploadId = uploadId
             return mpu
 
@@ -290,7 +290,7 @@ def _ensure_init(self, final_write: bool = False) -> MultiPartUpload:
         with lock:
             uploadId = _safe_get(shared_state, 0.1)
             if uploadId is not None:
-                # someone else initialized it while we were getting a lock
+                # someone else initialised it while we were getting a lock
                 mpu.uploadId = uploadId
                 return mpu
 
diff --git a/odc/geo/cog/_tifffile.py b/odc/geo/cog/_tifffile.py
index bbd9010c..634354c2 100644
--- a/odc/geo/cog/_tifffile.py
+++ b/odc/geo/cog/_tifffile.py
@@ -627,16 +627,15 @@ def save_cog_with_dask(
     bigtiff: bool = True,
     overview_resampling: Union[int, str] = "nearest",
     aws: Optional[dict[str, Any]] = None,
-    azure: Optional[dict[str, Any]] = None,
     client: Any = None,
     stats: bool | int = True,
     **kw,
 ) -> Any:
     """
-    Save a Cloud Optimized GeoTIFF to S3, Azure Blob Storage, or file with Dask.
+    Save a Cloud Optimized GeoTIFF to S3 or file with Dask.
 
     :param xx: Pixels as :py:class:`xarray.DataArray` backed by Dask
-    :param dst: S3, Azure URL, or file path
+    :param dst: S3 url or a file path on shared storage
     :param compression: Compression to use, default is ``DEFLATE``
     :param level: Compression "level", depends on chosen compression
     :param predictor: TIFF predictor setting
@@ -662,27 +661,28 @@ def save_cog_with_dask(
     upload_params.update(
         {k: aws.pop(k) for k in ["writes_per_chunk", "spill_sz"] if k in aws}
     )
-
     parts_base = kw.pop("parts_base", None)
 
-    # normalize compression and remove GDAL compat options from kw
+    # Normalise compression settings and remove GDAL compat options from kw
     predictor, compression, compressionargs = _norm_compression_tifffile(
         xx.dtype, predictor, compression, compressionargs, level=level, kw=kw
     )
+
     xx_odc = xx.odc
     assert isinstance(xx_odc, ODCExtensionDa)
     assert isinstance(xx_odc.geobox, GeoBox) or xx_odc.geobox is None
 
     ydim = xx_odc.ydim
-    data_chunks: Tuple[int, int] = xx.data.chunksize[ydim : ydim + 2]
+    data_chunks: tuple[int, int] = xx.data.chunksize[ydim : ydim + 2]
     if isinstance(blocksize, Unset):
         blocksize = [data_chunks, int(max(*data_chunks) // 2)]
 
+    # Metadata
     band_names = _band_names(xx)
     sample_descriptions_metadata = _gdal_sample_descriptions(band_names)
-    no_metadata = (stats is False) and not band_names
-    gdal_metadata = None if no_metadata else ""
+    gdal_metadata = None if stats is False and not band_names else ""
 
+    # Prepare COG metadata and header
     meta, hdr0 = _make_empty_cog(
         xx.shape,
         xx.dtype,

From 4330b8399000ccb5275bf631b608810148fa2ef3 Mon Sep 17 00:00:00 2001
From: wietzesuijker
Date: Mon, 16 Dec 2024 21:27:44 +0000
Subject: [PATCH 5/7] Docs: function docstr

---
 odc/geo/cog/_tifffile.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/odc/geo/cog/_tifffile.py b/odc/geo/cog/_tifffile.py
index 634354c2..34066293 100644
--- a/odc/geo/cog/_tifffile.py
+++ b/odc/geo/cog/_tifffile.py
@@ -21,10 +21,9 @@
 from ..geobox import GeoBox
 from ..math import resolve_nodata
 from ..types import Shape2d, SomeNodata, Unset, shape_
-from ._az import MultiPartUpload as AzMultiPartUpload
 from ._mpu import mpu_write
 from ._mpu_fs import MPUFileSink
-from ._s3 import MultiPartUpload as S3MultiPartUpload, s3_parse_url
+from ._s3 import MultiPartUpload, s3_parse_url
 from ._shared import (
     GDAL_COMP,
     GEOTIFF_TAGS,

From ade0406999aef15aa9f90d13a7d4a98e6fce7d39 Mon Sep 17 00:00:00 2001
From: wietzesuijker
Date: Wed, 18 Dec 2024 22:03:41 +0000
Subject: [PATCH 6/7] Chore: bump python to 3.10

---
 dev-env.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dev-env.yml b/dev-env.yml
index 0f87332b..2b432f4b 100644
--- a/dev-env.yml
+++ b/dev-env.yml
@@ -3,7 +3,7 @@ channels:
   - conda-forge
 
 dependencies:
-  - python =3.8
+  - python =3.10
 
   # odc-geo dependencies
   - pyproj

From 9439ecc7773cdfe87f063d184bdb51950beb1f1c Mon Sep 17 00:00:00 2001
From: Ariana Barzinpour
Date: Thu, 19 Dec 2024 04:21:12 +0000
Subject: [PATCH 7/7] use _crs for hash instead of _str

---
 odc/geo/crs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/odc/geo/crs.py b/odc/geo/crs.py
index 8d70a76c..b682fc78 100644
--- a/odc/geo/crs.py
+++ b/odc/geo/crs.py
@@ -242,7 +242,7 @@ def __str__(self) -> str:
         return self._str
 
     def __hash__(self) -> int:
-        return hash(self._str)
+        return hash(self._crs)
 
     def __repr__(self) -> str:
         return f"CRS('{self._str}')"