Skip to content

Commit

Permalink
Merge pull request #74 from pastas/dev
Browse files Browse the repository at this point in the history
Release 0.9.1: minor fixes
  • Loading branch information
dbrakenhoff authored Dec 5, 2022
2 parents 879efe6 + 8562603 commit b676e0f
Show file tree
Hide file tree
Showing 16 changed files with 675 additions and 302 deletions.
8 changes: 6 additions & 2 deletions pastastore/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
from . import connectors, util
from .connectors import (ArcticConnector, DictConnector, PasConnector,
PystoreConnector)
from .connectors import (
ArcticConnector,
DictConnector,
PasConnector,
PystoreConnector,
)
from .store import PastaStore
from .version import __version__
100 changes: 72 additions & 28 deletions pastastore/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,9 @@ def _add_item(
pass

@abstractmethod
def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
def _get_item(
self, libname: str, name: str
) -> Union[FrameorSeriesUnion, Dict]:
"""Internal method to get item (series or pastas.Models).
Must be overriden by subclass.
Expand Down Expand Up @@ -267,7 +269,9 @@ def _update_series(
update_meta = self._get_metadata(libname, name)
if metadata is not None:
update_meta.update(metadata)
self._add_series(libname, update, name, metadata=update_meta, overwrite=True)
self._add_series(
libname, update, name, metadata=update_meta, overwrite=True
)

def _upsert_series(
self,
Expand Down Expand Up @@ -320,7 +324,9 @@ def update_metadata(self, libname: str, name: str, metadata: dict) -> None:
# get series, since just updating metadata is not really defined
# in all cases
s = self._get_series(libname, name, progressbar=False)
self._add_series(libname, s, name, metadata=update_meta, overwrite=True)
self._add_series(
libname, s, name, metadata=update_meta, overwrite=True
)

def add_oseries(
self,
Expand Down Expand Up @@ -590,7 +596,9 @@ def del_models(self, names: Union[list, str]) -> None:
self._del_oseries_model_link(oname, n)
self._clear_cache("_modelnames_cache")

def del_oseries(self, names: Union[list, str], remove_models: bool = False):
def del_oseries(
self, names: Union[list, str], remove_models: bool = False
):
"""Delete oseries from the database.
Parameters
Expand All @@ -607,7 +615,9 @@ def del_oseries(self, names: Union[list, str], remove_models: bool = False):
# remove associated models from database
if remove_models:
modelnames = list(
chain.from_iterable([self.oseries_models.get(n, []) for n in names])
chain.from_iterable(
[self.oseries_models.get(n, []) for n in names]
)
)
self.del_models(modelnames)

Expand Down Expand Up @@ -835,7 +845,9 @@ def get_models(
if return_dict:
ml = data
else:
ml = self._parse_model_dict(data, update_ts_settings=update_ts_settings)
ml = self._parse_model_dict(
data, update_ts_settings=update_ts_settings
)
models.append(ml)
if len(models) == 1 and squeeze:
return models[0]
Expand Down Expand Up @@ -867,11 +879,15 @@ def empty_library(
return
names = self._parse_names(None, libname)
for name in (
tqdm(names, desc=f"Deleting items from {libname}") if progressbar else names
tqdm(names, desc=f"Deleting items from {libname}")
if progressbar
else names
):
self._del_item(libname, name)
self._clear_cache(libname)
print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
print(
f"Emptied library {libname} in {self.name}: " f"{self.__class__}"
)

def _iter_series(self, libname: str, names: Optional[List[str]] = None):
"""Internal method iterate over timeseries in library.
Expand Down Expand Up @@ -950,9 +966,13 @@ def iter_models(

modelnames = self._parse_names(modelnames, "models")
for mlnam in modelnames:
yield self.get_models(mlnam, return_dict=return_dict, progressbar=False)
yield self.get_models(
mlnam, return_dict=return_dict, progressbar=False
)

def _add_oseries_model_links(self, onam: str, mlnames: Union[str, List[str]]):
def _add_oseries_model_links(
self, onam: str, mlnames: Union[str, List[str]]
):
"""Add model name to stored list of models per oseries.
Parameters
Expand Down Expand Up @@ -1198,12 +1218,12 @@ def _parse_model_dict(self, mdict: dict, update_ts_settings: bool = False):
mdict["oseries"]["series"] = self.get_oseries(name)
# update tmin/tmax from timeseries
if update_ts_settings:
mdict["oseries"]["settings"]["tmin"] = mdict["oseries"]["series"].index[
0
]
mdict["oseries"]["settings"]["tmax"] = mdict["oseries"]["series"].index[
-1
]
mdict["oseries"]["settings"]["tmin"] = mdict["oseries"][
"series"
].index[0]
mdict["oseries"]["settings"]["tmax"] = mdict["oseries"][
"series"
].index[-1]

# StressModel, WellModel
for ts in mdict["stressmodels"].values():
Expand All @@ -1215,8 +1235,12 @@ def _parse_model_dict(self, mdict: dict, update_ts_settings: bool = False):
stress["series"] = self.get_stresses(name)
# update tmin/tmax from timeseries
if update_ts_settings:
stress["settings"]["tmin"] = stress["series"].index[0]
stress["settings"]["tmax"] = stress["series"].index[-1]
stress["settings"]["tmin"] = stress[
"series"
].index[0]
stress["settings"]["tmax"] = stress[
"series"
].index[-1]

# RechargeModel, TarsoModel
if ("prec" in ts.keys()) and ("evap" in ts.keys()):
Expand All @@ -1227,16 +1251,24 @@ def _parse_model_dict(self, mdict: dict, update_ts_settings: bool = False):
stress["series"] = self.get_stresses(name)
# update tmin/tmax from timeseries
if update_ts_settings:
stress["settings"]["tmin"] = stress["series"].index[0]
stress["settings"]["tmax"] = stress["series"].index[-1]
stress["settings"]["tmin"] = stress[
"series"
].index[0]
stress["settings"]["tmax"] = stress[
"series"
].index[-1]
else:
msg = "stress '{}' not present in project".format(name)
msg = "stress '{}' not present in project".format(
name
)
raise KeyError(msg)
# hack for pcov w dtype object (when filled with NaNs on store?)
if "fit" in mdict:
if "pcov" in mdict["fit"]:
pcov = mdict["fit"]["pcov"]
if pcov.dtypes.apply(lambda dtyp: isinstance(dtyp, object)).any():
if pcov.dtypes.apply(
lambda dtyp: isinstance(dtyp, object)
).any():
mdict["fit"]["pcov"] = pcov.astype(float)

try:
Expand All @@ -1261,11 +1293,17 @@ def _validate_input_series(series):
TypeError
if object is not of type pandas.DataFrame or pandas.Series
"""
if not (isinstance(series, pd.DataFrame) or isinstance(series, pd.Series)):
raise TypeError("Please provide pandas.DataFrame" " or pandas.Series!")
if not (
isinstance(series, pd.DataFrame) or isinstance(series, pd.Series)
):
raise TypeError(
"Please provide pandas.DataFrame" " or pandas.Series!"
)
if isinstance(series, pd.DataFrame):
if series.columns.size > 1:
raise ValueError("Only DataFrames with one " "column are supported!")
raise ValueError(
"Only DataFrames with one " "column are supported!"
)

@staticmethod
def _set_series_name(series, name):
Expand Down Expand Up @@ -1325,7 +1363,9 @@ def _check_model_series_names_for_store(ml):
for istress in sm.stress
]
# RechargeModel, TarsoModel
if isin(prec_evap_model, [i._name for i in ml.stressmodels.values()]).any():
if isin(
prec_evap_model, [i._name for i in ml.stressmodels.values()]
).any():
series_names += [
istress.series.name
for sm in ml.stressmodels.values()
Expand Down Expand Up @@ -1406,7 +1446,7 @@ def _check_stresses_in_store(self, ml: Union[ps.Model, dict]):
else:
stresses = sm.stress
for s in stresses:
if s.name not in self.stresses.index:
if str(s.name) not in self.stresses.index:
msg = (
f"Cannot add model because stress '{s.name}' "
"is not contained in store."
Expand All @@ -1426,7 +1466,7 @@ def _check_stresses_in_store(self, ml: Union[ps.Model, dict]):
else:
stresses = sm["stress"]
for s in stresses:
if s["name"] not in self.stresses.index:
if str(s["name"]) not in self.stresses.index:
msg = (
f"Cannot add model because stress '{s['name']}' "
"is not contained in store."
Expand Down Expand Up @@ -1690,3 +1730,7 @@ def __len__(self):
of models
"""
return self.conn.n_models

def random(self):
    """Return one randomly selected model from the store.

    Picks a name at random from the connector's cached list of model
    names and loads that model via the connector.
    """
    # local import keeps the stdlib module out of the class namespace
    from random import choice

    name = choice(self.conn._modelnames_cache)
    return self.conn.get_models(name)
20 changes: 15 additions & 5 deletions pastastore/connectors.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,9 @@ def _add_item(
lib = self._get_library(libname)
lib.write(name, item, metadata=metadata)

def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
def _get_item(
self, libname: str, name: str
) -> Union[FrameorSeriesUnion, Dict]:
"""Internal method to retrieve item from library.
Parameters
Expand Down Expand Up @@ -290,7 +292,9 @@ def _add_item(
is_type = "series"
elif isinstance(item, dict):
s = pd.DataFrame() # empty DataFrame as placeholder
jsondict = json.loads(json.dumps(item, cls=PastasEncoder, indent=4))
jsondict = json.loads(
json.dumps(item, cls=PastasEncoder, indent=4)
)
metadata = jsondict # model dict is stored in metadata
is_type = "series"
elif isinstance(item, list):
Expand All @@ -309,7 +313,9 @@ def _add_item(
lib = self._get_library(libname)
lib.write(name, s, metadata=metadata, overwrite=overwrite)

def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
def _get_item(
self, libname: str, name: str
) -> Union[FrameorSeriesUnion, Dict]:
"""Internal method to retrieve item from pystore library.
Parameters
Expand Down Expand Up @@ -484,7 +490,9 @@ def _add_item(
else:
lib[name] = (metadata, item)

def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
def _get_item(
self, libname: str, name: str
) -> Union[FrameorSeriesUnion, Dict]:
"""Internal method to retrieve item from pystore library.
Parameters
Expand Down Expand Up @@ -666,7 +674,9 @@ def _add_item(
with open(fname, "w") as fm:
fm.write(jsondict)

def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
def _get_item(
self, libname: str, name: str
) -> Union[FrameorSeriesUnion, Dict]:
"""Internal method to retrieve item.
Parameters
Expand Down
49 changes: 37 additions & 12 deletions pastastore/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,9 @@ def example_pastastore(conn="DictConnector"):
# add data

# oseries 1
o = pd.read_csv(os.path.join(datadir, "obs.csv"), index_col=0, parse_dates=True)
o = pd.read_csv(
os.path.join(datadir, "obs.csv"), index_col=0, parse_dates=True
)
pstore.add_oseries(o, "oseries1", metadata={"x": 165000, "y": 424000})
# oseries 2
o = pd.read_csv(
Expand All @@ -49,38 +51,58 @@ def example_pastastore(conn="DictConnector"):
pstore.add_oseries(o, "oseries2", metadata={"x": 164000, "y": 423000})

# oseries 3
o = pd.read_csv(os.path.join(datadir, "gw_obs.csv"), index_col=0, parse_dates=True)
o = pd.read_csv(
os.path.join(datadir, "gw_obs.csv"), index_col=0, parse_dates=True
)
pstore.add_oseries(o, "oseries3", metadata={"x": 165554, "y": 422685})

# prec 1
s = pd.read_csv(os.path.join(datadir, "rain.csv"), index_col=0, parse_dates=True)
pstore.add_stress(s, "prec1", kind="prec", metadata={"x": 165050, "y": 424050})
s = pd.read_csv(
os.path.join(datadir, "rain.csv"), index_col=0, parse_dates=True
)
pstore.add_stress(
s, "prec1", kind="prec", metadata={"x": 165050, "y": 424050}
)

# prec 2
s = pd.read_csv(
os.path.join(datadir, "rain_nb1.csv"), index_col=0, parse_dates=True
)
pstore.add_stress(s, "prec2", kind="prec", metadata={"x": 164010, "y": 423000})
pstore.add_stress(
s, "prec2", kind="prec", metadata={"x": 164010, "y": 423000}
)

# evap 1
s = pd.read_csv(os.path.join(datadir, "evap.csv"), index_col=0, parse_dates=True)
pstore.add_stress(s, "evap1", kind="evap", metadata={"x": 164500, "y": 424000})
s = pd.read_csv(
os.path.join(datadir, "evap.csv"), index_col=0, parse_dates=True
)
pstore.add_stress(
s, "evap1", kind="evap", metadata={"x": 164500, "y": 424000}
)

# evap 2
s = pd.read_csv(
os.path.join(datadir, "evap_nb1.csv"), index_col=0, parse_dates=True
)
pstore.add_stress(s, "evap2", kind="evap", metadata={"x": 164000, "y": 423030})
pstore.add_stress(
s, "evap2", kind="evap", metadata={"x": 164000, "y": 423030}
)

# well 1
s = pd.read_csv(os.path.join(datadir, "well.csv"), index_col=0, parse_dates=True)
pstore.add_stress(s, "well1", kind="well", metadata={"x": 164691, "y": 423579})
s = pd.read_csv(
os.path.join(datadir, "well.csv"), index_col=0, parse_dates=True
)
pstore.add_stress(
s, "well1", kind="well", metadata={"x": 164691, "y": 423579}
)

# river notebook data (nb5)
oseries = pd.read_csv(
os.path.join(datadir, "nb5_head.csv"), parse_dates=True, index_col=0
).squeeze("columns")
pstore.add_oseries(oseries, "head_nb5", metadata={"x": 200_000, "y": 450_000.0})
pstore.add_oseries(
oseries, "head_nb5", metadata={"x": 200_000, "y": 450_000.0}
)

rain = pd.read_csv(
os.path.join(datadir, "nb5_prec.csv"), parse_dates=True, index_col=0
Expand All @@ -98,7 +120,10 @@ def example_pastastore(conn="DictConnector"):
os.path.join(datadir, "nb5_riv.csv"), parse_dates=True, index_col=0
).squeeze("columns")
pstore.add_stress(
waterlevel, "riv_nb5", kind="riv", metadata={"x": 200_000, "y": 450_000.0}
waterlevel,
"riv_nb5",
kind="riv",
metadata={"x": 200_000, "y": 450_000.0},
)

# multiwell notebook data
Expand Down
Loading

0 comments on commit b676e0f

Please sign in to comment.