From 99941c189c5a96c5582bfcd94db4319d463be6b6 Mon Sep 17 00:00:00 2001
From: Andy Aschwanden
Date: Tue, 14 Nov 2023 09:11:11 -0900
Subject: [PATCH] Removed trajectory code

Moving the trajectory code to a separate repo removes the dependency on GDAL
for this repo, simplifying installation.
---
 .github/workflows/python-package.yml |   3 +-
 pism_ragis/__init__.py               |   2 -
 pism_ragis/interpolation.py          | 390 ---------------------------
 pism_ragis/trajectories.py           | 335 -----------------------
 requirements.txt                     |   4 +-
 5 files changed, 2 insertions(+), 732 deletions(-)
 delete mode 100644 pism_ragis/interpolation.py
 delete mode 100644 pism_ragis/trajectories.py

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 1209e72..07d5117 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -26,10 +26,9 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install gdal-bin libgdal-dev
         python -m pip install --upgrade pip
         python -m pip install flake8 pytest
-        if [ -f requirements-testing.txt ]; then pip install -r requirements-testing.txt; fi
+        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
    - name: Lint with flake8
      run: |
        # stop the build if there are Python syntax errors or undefined names
diff --git a/pism_ragis/__init__.py b/pism_ragis/__init__.py
index 99c2d56..44fb03d 100644
--- a/pism_ragis/__init__.py
+++ b/pism_ragis/__init__.py
@@ -11,7 +11,5 @@
     "computing",
     "stats",
     "computing",
-    "interpolation",
     "systems",
-    "trajectories",
 ]
diff --git a/pism_ragis/interpolation.py b/pism_ragis/interpolation.py
deleted file mode 100644
index 4980626..0000000
--- a/pism_ragis/interpolation.py
+++ /dev/null
@@ -1,390 +0,0 @@
-# Copyright (C) 2023 Andy Aschwanden, Constantine Khroulev
-#
-# This file is part of pism-ragis.
-#
-# PISM-RAGIS is free software; you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 3 of the License, or (at your option) any later
-# version.
-#
-# PISM-RAGIS is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License
-# along with PISM; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-"""
-Module provides functions for interpolation
-"""
-
-from typing import Optional, Tuple, Union
-
-import numpy as np
-import scipy
-from numpy import ndarray
-from shapely import Point
-from xarray import DataArray
-
-
-class InterpolationMatrix:
-
-    """Stores bilinear interpolation weights used to extract profiles.
-    (Nearest-neighbor weights are not implemented yet.)
-
-    """
-
-    def __init__(
-        self,
-        x: np.ndarray,
-        y: np.ndarray,
-        px: np.ndarray,
-        py: np.ndarray,
-        bilinear=True,
-    ):
-        """Interpolate values of z to points (px,py) assuming that z is on a
-        regular grid defined by x and y."""
-        super().__init__()
-
-        assert len(px) == len(py)
-
-        # The grid has to be equally spaced.
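-        # (Only uniformity to within 1e-9 is checked: grid_column/grid_row
-        # derive indices from a single dx and dy, so the bilinear weights
-        # assume constant spacing; the tolerance absorbs floating-point
-        # noise in the coordinate arrays.)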
-        assert np.fabs(np.diff(x).max() - np.diff(x).min()) < 1e-9
-        assert np.fabs(np.diff(y).max() - np.diff(y).min()) < 1e-9
-
-        dx = x[1] - x[0]
-        dy = y[1] - y[0]
-
-        assert dx != 0
-        assert dy != 0
-
-        cs = [self.grid_column(x, dx, p_x) for p_x in px]
-        rs = [self.grid_row(y, dy, p_y) for p_y in py]
-
-        self.c_min = np.min(cs)
-        self.c_max = min(np.max(cs) + 1, len(x) - 1)
-
-        self.r_min = np.min(rs)
-        self.r_max = min(np.max(rs) + 1, len(y) - 1)
-
-        # compute the size of the subset needed for interpolation
-        self.n_rows = self.r_max - self.r_min + 1
-        self.n_cols = self.c_max - self.c_min + 1
-
-        n_points = len(px)
-        self.A = scipy.sparse.lil_matrix((n_points, self.n_rows * self.n_cols))
-
-        if bilinear:
-            self._compute_bilinear_matrix(x, y, dx, dy, px, py)
-        else:
-            raise NotImplementedError
-
-    def column(self, r, c):
-        """Interpolation matrix column number corresponding to r,c of the
-        array *subset*. This is the same as the linear index within
-        the subset needed for interpolation.
-
-        """
-        return self.n_cols * min(r, self.n_rows - 1) + min(c, self.n_cols - 1)
-
-    @staticmethod
-    def find(grid, delta, point):
-        """Find the point to the left of point on the grid with spacing
-        delta."""
-        if delta > 0:
-            # grid points are stored in the increasing order
-            if point <= grid[0]:  # pylint: disable=R1705
-                return 0
-            elif point >= grid[-1]:
-                return len(grid) - 1  # pylint: disable=R1705
-            else:
-                return int(np.floor((point - grid[0]) / delta))
-        else:
-            # grid points are stored in the decreasing order
-            if point >= grid[0]:  # pylint: disable=R1705
-                return 0
-            elif point <= grid[-1]:
-                return len(grid) - 1
-            else:
-                return int(np.floor((point - grid[0]) / delta))
-
-    def grid_column(self, x, dx, X):
-        "Input grid column number corresponding to X."
-        return self.find(x, dx, X)
-
-    def grid_row(self, y, dy, Y):
-        "Input grid row number corresponding to Y."
-        return self.find(y, dy, Y)
-
-    def _compute_bilinear_matrix(self, x, y, dx, dy, px, py):
-        """Initialize a bilinear interpolation matrix."""
-        for k in range(self.A.shape[0]):
-            x_k = px[k]
-            y_k = py[k]
-
-            x_min = np.min(x)
-            x_max = np.max(x)
-
-            y_min = np.min(y)
-            y_max = np.max(y)
-
-            # make sure we are in the bounding box defined by the grid
-            x_k = max(x_k, x_min)
-            x_k = min(x_k, x_max)
-            y_k = max(y_k, y_min)
-            y_k = min(y_k, y_max)
-
-            C = self.grid_column(x, dx, x_k)
-            R = self.grid_row(y, dy, y_k)
-
-            alpha = (x_k - x[C]) / dx
-            beta = (y_k - y[R]) / dy
-
-            if alpha < 0.0:
-                alpha = 0.0
-            elif alpha > 1.0:
-                alpha = 1.0
-
-            if beta < 0.0:
-                beta = 0.0
-            elif beta > 1.0:
-                beta = 1.0
-
-            # indexes within the subset needed for interpolation
-            c = C - self.c_min
-            r = R - self.r_min
-
-            self.A[k, self.column(r, c)] += (1.0 - alpha) * (1.0 - beta)
-            self.A[k, self.column(r + 1, c)] += (1.0 - alpha) * beta
-            self.A[k, self.column(r, c + 1)] += alpha * (1.0 - beta)
-            self.A[k, self.column(r + 1, c + 1)] += alpha * beta
-
-    def adjusted_matrix(self, mask):
-        """Return adjusted interpolation matrix that ignores missing (masked)
-        values."""
-
-        A = self.A.tocsr()
-        n_points = A.shape[0]
-
-        output_mask = np.zeros(n_points, dtype=np.bool_)
-
-        for r in range(n_points):
-            # for each row, i.e. each point along the profile
-            row = np.s_[A.indptr[r] : A.indptr[r + 1]]
-            # get the locations and values
-            indexes = A.indices[row]
-            values = A.data[row]
-
-            # if a particular location is masked, set the
-            # interpolation weight to zero
-            for k, index in enumerate(indexes):
-                if np.ravel(mask)[index]:
-                    values[k] = 0.0
-
-            # normalize so that we still have an interpolation matrix
-            if values.sum() > 0:
-                values = values / values.sum()
-            else:
-                output_mask[r] = True
-
-            A.data[row] = values
-
-        A.eliminate_zeros()
-
-        return A, output_mask
-
-    def apply(self, array):
-        """Apply the interpolation to an array. Returns values at points along
-        the profile."""
-        subset = array[self.r_min : self.r_max + 1, self.c_min : self.c_max + 1]
-        return self.apply_to_subset(subset)
-
-    def apply_to_subset(self, subset):
-        """Apply interpolation to an array subset."""
-
-        if np.ma.is_masked(subset):
-            A, mask = self.adjusted_matrix(subset.mask)
-            data = A * np.ravel(subset)
-            return np.ma.array(data, mask=mask)
-
-        return self.A.tocsr() * np.ravel(subset)
-
-
-def interpolate_rkf(
-    Vx: np.ndarray,
-    Vy: np.ndarray,
-    x: np.ndarray,
-    y: np.ndarray,
-    start_pt: Point,
-    delta_time: float = 0.1,
-) -> Tuple[Optional[Point], Optional[float]]:
-    """
-    Interpolate point-like object position according to the Runge-Kutta-Fehlberg method.
-
-    :param Vx: x-component of the velocity field on the grid defined by x and y.
-    :type Vx: numpy.ndarray.
-    :param Vy: y-component of the velocity field on the grid defined by x and y.
-    :type Vy: numpy.ndarray.
-    :param x: grid coordinates in the x direction.
-    :type x: numpy.ndarray.
-    :param y: grid coordinates in the y direction.
-    :type y: numpy.ndarray.
-    :param start_pt: the initial point.
-    :type start_pt: Point.
-    :param delta_time: the integration time step.
-    :type delta_time: float.
-    :return: the estimated point-like object position at the incremented time, with the estimation error.
-    :rtype: tuple of optional Point and optional float.
-    """
-
-    k1_vx, k1_vy = velocity_at_point(Vx, Vy, x, y, start_pt)
-
-    if k1_vx is None or k1_vy is None:
-        return None, None
-
-    k2_pt = Point(
-        start_pt.x + (0.25) * delta_time * k1_vx,
-        start_pt.y + (0.25) * delta_time * k1_vy,
-    )
-
-    k2_vx, k2_vy = velocity_at_point(Vx, Vy, x, y, k2_pt)
-
-    if k2_vx is None or k2_vy is None:
-        return None, None
-
-    k3_pt = Point(
-        start_pt.x
-        + (3.0 / 32.0) * delta_time * k1_vx
-        + (9.0 / 32.0) * delta_time * k2_vx,
-        start_pt.y
-        + (3.0 / 32.0) * delta_time * k1_vy
-        + (9.0 / 32.0) * delta_time * k2_vy,
-    )
-
-    k3_vx, k3_vy = velocity_at_point(Vx, Vy, x, y, k3_pt)
-
-    if k3_vx is None or k3_vy is None:
-        return None, None
-
-    k4_pt = Point(
-        start_pt.x
-        + (1932.0 / 2197.0) * delta_time * k1_vx
-        - (7200.0 / 2197.0) * delta_time * k2_vx
-        + (7296.0 / 2197.0) * delta_time * k3_vx,
-        start_pt.y
-        + (1932.0 / 2197.0) * delta_time * k1_vy
-        - (7200.0 / 2197.0) * delta_time * k2_vy
-        + (7296.0 / 2197.0) * delta_time * k3_vy,
-    )
-
-    k4_vx, k4_vy = velocity_at_point(Vx, Vy, x, y, k4_pt)
-
-    if k4_vx is None or k4_vy is None:
-        return None, None
-
-    k5_pt = Point(
-        start_pt.x
-        + (439.0 / 216.0) * delta_time * k1_vx
-        - (8.0) * delta_time * k2_vx
-        + (3680.0 / 513.0) * delta_time * k3_vx
-        - (845.0 / 4104.0) * delta_time * k4_vx,
-        start_pt.y
-        + (439.0 / 216.0) * delta_time * k1_vy
-        - (8.0) * delta_time * k2_vy
-        + (3680.0 / 513.0) * delta_time * k3_vy
-        - (845.0 / 4104.0) * delta_time * k4_vy,
-    )
-
-    k5_vx, k5_vy = velocity_at_point(Vx, Vy, x, y, k5_pt)
-
-    if k5_vx is None or k5_vy is None:
-        return None, None
-
-    k6_pt = Point(
-        start_pt.x
-        - (8.0 / 27.0) * delta_time * k1_vx
-        + (2.0) * delta_time * k2_vx
-        - (3544.0 / 2565.0) * delta_time * k3_vx
-        + (1859.0 / 4104.0) * delta_time * k4_vx
-        - (11.0 / 40.0) * delta_time * k5_vx,
-        start_pt.y
-        - (8.0 / 27.0) * delta_time * k1_vy
-        + (2.0) * delta_time * k2_vy
-        - (3544.0 / 2565.0) * delta_time * k3_vy
-        + (1859.0 / 4104.0) * delta_time * k4_vy
-        - (11.0 / 40.0) * delta_time * k5_vy,
-    )
-
-    k6_vx, k6_vy = velocity_at_point(Vx, Vy, x, y, k6_pt)
-
-    if k6_vx is None or k6_vy is None:
-        return None, None
-
-    rkf_4o_x = start_pt.x + delta_time * (
-        (25.0 / 216.0) * k1_vx
-        + (1408.0 / 2565.0) * k3_vx
-        + (2197.0 / 4104.0) * k4_vx
-        - (1.0 / 5.0) * k5_vx
-    )
-    rkf_4o_y = start_pt.y + delta_time * (
-        (25.0 / 216.0) * k1_vy
-        + (1408.0 / 2565.0) * k3_vy
-        + (2197.0 / 4104.0) * k4_vy
-        - (1.0 / 5.0) * k5_vy
-    )
-    temp_pt = Point(rkf_4o_x, rkf_4o_y)
-
-    interp_x = start_pt.x + delta_time * (
-        (16.0 / 135.0) * k1_vx
-        + (6656.0 / 12825.0) * k3_vx
-        + (28561.0 / 56430.0) * k4_vx
-        - (9.0 / 50.0) * k5_vx
-        + (2.0 / 55.0) * k6_vx
-    )
-    interp_y = start_pt.y + delta_time * (
-        (16.0 / 135.0) * k1_vy
-        + (6656.0 / 12825.0) * k3_vy
-        + (28561.0 / 56430.0) * k4_vy
-        - (9.0 / 50.0) * k5_vy
-        + (2.0 / 55.0) * k6_vy
-    )
-    interp_pt = Point(interp_x, interp_y)
-
-    interp_pt_error_estim = interp_pt.distance(temp_pt)
-
-    return interp_pt, interp_pt_error_estim
-
-
-def velocity_at_point(
-    Vx: Union[ndarray, DataArray],
-    Vy: Union[ndarray, DataArray],
-    x: Union[ndarray, DataArray],
-    y: Union[ndarray, DataArray],
-    p: Union[list[Point], Point],
-) -> Tuple:
-    """
-    Return velocity at Point p using bilinear interpolation
-    """
-
-    if isinstance(Vx, DataArray):
-        Vx = Vx.to_numpy()
-    if isinstance(Vy, DataArray):
-        Vy = Vy.to_numpy()
-    if isinstance(x, DataArray):
-        x = x.to_numpy()
-    if isinstance(y, DataArray):
-        y = y.to_numpy()
-
-    if isinstance(p, Point):
-        if p.is_empty:
-            return None, None
-        px = np.array([p.x])
-        py = np.array([p.y])
-    else:
-        px = np.array([pt.x for pt in p])
-        py = np.array([pt.y for pt in p])
-    A = InterpolationMatrix(x, y, px, py)
-    vx = A.apply(Vx)
-    vy = A.apply(Vy)
-    if isinstance(p, Point):
-        vx = vx[0]
-        vy = vy[0]
-    return vx, vy
diff --git a/pism_ragis/trajectories.py b/pism_ragis/trajectories.py
deleted file mode 100644
index 703cce0..0000000
--- a/pism_ragis/trajectories.py
+++ /dev/null
@@ -1,335 +0,0 @@
-# Copyright (C) 2023 Andy Aschwanden, Constantine Khroulev
-#
-# This file is part of pism-ragis.
-#
-# PISM-RAGIS is free software; you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 3 of the License, or (at your option) any later
-# version.
-#
-# PISM-RAGIS is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
-#
-# You should have received a copy of the GNU General Public License
-# along with PISM; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-"""
-Module provides functions for calculating trajectories
-"""
-
-
-from pathlib import Path
-from typing import Tuple, Union
-
-import geopandas as gp
-import numpy as np
-import pandas as pd
-import xarray as xr
-from geopandas import GeoDataFrame
-from numpy import ndarray
-from osgeo import ogr, osr
-from shapely import Point
-from tqdm.auto import tqdm
-from xarray import DataArray
-
-from .interpolation import interpolate_rkf, velocity_at_point
-
-# Need to figure out how to make hooks so we can detect and propagate how we use TQDM
-# https://github.com/benbovy/xarray-simlab/blob/master/xsimlab/monitoring.py
-
-
-def compute_trajectory(
-    point: Point,
-    Vx: Union[ndarray, DataArray],
-    Vy: Union[ndarray, DataArray],
-    x: Union[ndarray, DataArray],
-    y: Union[ndarray, DataArray],
-    dt: float = 0.1,
-    total_time: float = 1000,
-    reverse: bool = False,
-) -> Tuple[list[Point], list]:
-    """
-    Compute trajectory
-
-    Computes a trajectory using Runge-Kutta-Fehlberg integration. The routine
-    is unit-agnostic; the user must ensure consistency of units. For example,
-    if the velocity field is given in m/yr, then `dt` and `total_time` are
-    assumed to be in years.
-
-    Parameters
-    ----------
-    point : shapely.Point
-        Starting point of the trajectory
-    Vx : numpy.ndarray or xarray.DataArray
-        x-component of velocity
-    Vy : numpy.ndarray or xarray.DataArray
-        y-component of velocity
-    x : numpy.ndarray or xarray.DataArray
-        coordinates in x direction
-    y : numpy.ndarray or xarray.DataArray
-        coordinates in y direction
-    dt : float
-        integration time step
-    total_time : float
-        total integration time
-    reverse : bool
-        if True, negate the velocity field and integrate backwards in time
-
-    Returns
-    ----------
-    pts : list[shapely.Point]
-        `dt`-spaced points along trajectory from 0 to `total_time`.
-    pts_error_estim : list
-        error estimate at `dt`-spaced points along trajectory
-        from 0 to `total_time`.
-
-    Examples
-    ----------
-
-    Create data:
-
-    >>> import numpy as np
-    >>> from shapely import Point
-
-    >>> nx = 201
-    >>> ny = 401
-    >>> x = np.linspace(-100e3, 100e3, nx)
-    >>> y = np.linspace(-100e3, 100e3, ny)
-    >>> X, Y = np.meshgrid(x, y)
-
-    >>> vx = -Y / np.sqrt(X**2 + Y**2) * 250
-    >>> vy = X / np.sqrt(X**2 + Y**2) * 250
-    >>> p = Point(0, -50000)
-
-    >>> pts, pts_error_estim = compute_trajectory(p, vx, vy, x, y, dt=1, total_time=10)
-    >>> len(pts)
-    12
-    """
-    if reverse:
-        Vx = -Vx
-        Vy = -Vy
-    pts = [point]
-    pts_error_estim = [0.0]
-    time = 0.0
-    with tqdm(desc="Integrating trajectory", total=total_time) as pbar:
-        while abs(time) <= (total_time):
-            point, point_error_estim = interpolate_rkf(
-                Vx, Vy, x, y, point, delta_time=dt
-            )
-            if (point is None) or (point_error_estim is None):
-                break
-            pts.append(point)
-            pts_error_estim.append(point_error_estim)
-            time += dt
-            pbar.update(dt)
-        pbar.refresh()
-    return pts, pts_error_estim
-
-
-def compute_perturbation(
-    data_url: Union[str, Path],
-    ogr_url: Union[str, Path],
-    perturbation: int = 0,
-    sample: Union[list, ndarray] = [0.5, 0.5],
-    sigma: float = 1,
-    total_time: float = 10_000,
-    dt: float = 1,
-    reverse: bool = False,
-) -> GeoDataFrame:
-    """
-    Compute a perturbed trajectory.
-
-    It appears OGR objects cannot be pickled by joblib, hence the data set is
-    loaded here instead of being passed in.
-
-    Parameters
-    ----------
-    data_url : string or pathlib.Path
-        Path to a netCDF file with the velocity field. Must provide the
-        variables "vx", "vy", "vx_err", "vy_err" and the coordinates
-        "x" and "y".
-    ogr_url : string or pathlib.Path
-        Path to an OGR data set with the starting points; each feature must
-        have a "name" attribute.
-    perturbation : int
-        Id of this perturbation, stored in the "perturbation" column of the
-        result.
-    sample : list or numpy.ndarray
-        Two numbers in [0, 1] picking a velocity field within the error band,
-        between (vx - sigma * vx_err) and (vx + sigma * vx_err), and likewise
-        for vy.
-    sigma : float
-        Width of the error band, in standard deviations.
-    total_time : float
-        Total integration time.
-    dt : float
-        Integration time step.
-    reverse : bool
-        If True, integrate backwards in time.
-
-    Returns
-    ----------
-    df : geopandas.GeoDataFrame
-        Trajectories for all starting points of all features, one row per
-        trajectory point, in EPSG:3413.
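-
-    Examples
-    ----------
-    A minimal sketch; the file names here are illustrative, not files
-    shipped with this repository:
-
-    >>> df = compute_perturbation(
-    ...     "velocities.nc",
-    ...     "glacier_outlines.gpkg",
-    ...     perturbation=0,
-    ...     sample=[0.5, 0.5],
-    ...     total_time=1_000,
-    ...     dt=1,
-    ... )  # doctest: +SKIP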
-    """
-
-    ds = xr.open_dataset(data_url)
-
-    VX = np.squeeze(ds["vx"].to_numpy())
-    VY = np.squeeze(ds["vy"].to_numpy())
-    VX_e = np.squeeze(ds["vx_err"].to_numpy())
-    VY_e = np.squeeze(ds["vy_err"].to_numpy())
-    x = ds["x"].to_numpy()
-    y = ds["y"].to_numpy()
-
-    Vx, Vy = get_perturbed_velocities(VX, VY, VX_e, VY_e, sample=sample, sigma=sigma)
-    ogr.UseExceptions()
-    if isinstance(ogr_url, Path):
-        ogr_url = str(ogr_url.absolute())
-    in_ds = ogr.Open(ogr_url)
-
-    layer = in_ds.GetLayer(0)
-    srs_geo = osr.SpatialReference()
-    srs_geo.ImportFromEPSG(3413)
-
-    all_glaciers = []
-    with tqdm(enumerate(layer), total=len(layer), leave=False) as pbar:
-        for _, feature in pbar:
-            geometry = feature.GetGeometryRef()
-            geometry.TransformTo(srs_geo)
-            points = geometry.GetPoints()
-            points = [Point(p) for p in points]
-            attrs = feature.items()
-            attrs["perturbation"] = perturbation
-            glacier_name = attrs["name"]
-            pbar.set_description(f"""Processing {glacier_name}""")
-            trajs = []
-            for p in points:
-                traj, _ = compute_trajectory(
-                    p, Vx, Vy, x, y, total_time=total_time, dt=dt, reverse=reverse
-                )
-                trajs.append(traj)
-            df = trajectories_to_geopandas(trajs, Vx, Vy, x, y, attrs=attrs)
-            all_glaciers.append(df)
-        pbar.refresh()
-    return pd.concat(all_glaciers)
-
-
-def get_perturbed_velocities(
-    VX, VY, VX_e, VY_e, sample, sigma: float = 1.0
-) -> Tuple[Union[ndarray, DataArray], Union[ndarray, DataArray]]:
-    """
-    Return perturbed velocity field
-    """
-    VX_min, VX_max = VX - sigma * VX_e, VX + sigma * VX_e
-    VY_min, VY_max = VY - sigma * VY_e, VY + sigma * VY_e
-
-    Vx = VX_min + sample[0] * (VX_max - VX_min)
-    Vy = VY_min + sample[1] * (VY_max - VY_min)
-
-    return Vx, Vy
-
-
-def trajectories_to_geopandas(
-    trajs: list,
-    Vx: np.ndarray,
-    Vy: np.ndarray,
-    x: np.ndarray,
-    y: np.ndarray,
-    attrs: dict = {},
-) -> gp.GeoDataFrame:
-    """Convert trajectory to GeoDataFrame"""
-    dfs = []
-    for traj_id, traj in enumerate(trajs):
-        vx, vy = velocity_at_point(Vx, Vy, x, y, traj)
-        v = np.sqrt(vx**2 + vy**2)
-        d = [0] + [traj[k].distance(traj[k - 1]) for k in range(1, len(traj))]
-        traj_data = {
-            "vx": vx,
-            "vy": vy,
-            "v": v,
-            "traj_id": traj_id,
-            "traj_pt": range(len(traj)),
-            "distance": d,
-            "distance_from_origin": np.cumsum(d),
-        }
-        for k, v in attrs.items():
-            traj_data[k] = v
-        df = gp.GeoDataFrame.from_dict(traj_data, geometry=traj, crs="EPSG:3413")
-        dfs.append(df)
-    return pd.concat(dfs).reset_index(drop=True)
diff --git a/requirements.txt b/requirements.txt
index e3ae914..5402d0e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,4 @@
-numpy>=1.26.0
 dask>=2023.10.1
-gdal>=3.7.1
-geopandas>=0.14.0
 joblib>=1.3.2
 matplotlib>=3.8.0
 pandas>=2.0.3
@@ -9,6 +6,7 @@ pydoe>=0.3.8
 salib>=1.4.7
 scipy>=1.10.1
 shapely>=2.0.2
+h5netcdf
 xarray>=2023.10.1
 pyogrio>=0.7.2
 pyarrow>=14.0.0