Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AmiciObjective: check that sensitivities wrt all relevant parameters … #1416

Draft
wants to merge 11 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions doc/example/petab_import.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@
"converter_config = libsbml.SBMLLocalParameterConverter().getDefaultProperties()\n",
"petab_problem.sbml_document.convert(converter_config)\n",
"\n",
"obj = importer.create_objective()\n",
"obj = problem.objective\n",
"\n",
"# for some models, hyperparameters need to be adjusted\n",
"# obj.amici_solver.setMaxSteps(10000)\n",
Expand All @@ -200,7 +200,7 @@
"outputs": [],
"source": [
"ret = obj(\n",
" petab_problem.x_nominal_scaled,\n",
" petab_problem.x_nominal_free_scaled,\n",
" mode=\"mode_fun\",\n",
" sensi_orders=(0, 1),\n",
" return_dict=True,\n",
Expand Down Expand Up @@ -383,7 +383,7 @@
"outputs": [],
"source": [
"ref = visualize.create_references(\n",
" x=petab_problem.x_nominal_scaled, fval=obj(petab_problem.x_nominal_scaled)\n",
"    x=petab_problem.x_nominal_free_scaled, fval=obj(petab_problem.x_nominal_free_scaled)\n",
")\n",
"\n",
"visualize.waterfall(result, reference=ref, scale_y=\"lin\")\n",
Expand Down
41 changes: 41 additions & 0 deletions pypesto/objective/amici/amici.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import tempfile
from collections import OrderedDict
from collections.abc import Sequence
from itertools import chain
from pathlib import Path
from typing import TYPE_CHECKING, Optional, Union

Expand Down Expand Up @@ -233,6 +234,33 @@ def __init__(
# `set_custom_timepoints` method for more information.
self.custom_timepoints = None

def _get_missing_sensitivities(self) -> set[str]:
"""Get the model parameters for which sensitivities are missing.

Get the model parameters w.r.t. which sensitivities are required,
but aren't available.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
but aren't available missing.
but aren't available.

"""
required_sensitivities = set()
objective_parameters = set(self.x_names)
# resolve parameter mapping: collect all model parameters that depend
# on the optimization parameters
for condition_mapping in self.parameter_mapping:
required_sensitivities.update(
model_par
for model_par, opt_par in chain(
condition_mapping.map_preeq_fix.items(),
condition_mapping.map_sim_fix.items(),
condition_mapping.map_sim_var.items(),
)
# we assume that the values in the parameter mapping are
# either numeric values or parameter ID strings
if opt_par in objective_parameters
)

# Sensitivities can only be computed w.r.t. the model's non-fixed
# parameters (``getParameterIds``); whatever remains after the
# subtraction is therefore missing.
return required_sensitivities - set(self.amici_model.getParameterIds())

def get_config(self) -> dict:
"""Return basic information of the objective configuration."""
info = super().get_config()
Expand Down Expand Up @@ -456,6 +484,19 @@ def call_unprocessed(
"""
import amici

# Check that we can compute the requested sensitivities
# based on the supplied model. If not, raise an error.
# This has to be done on every call, since the preprocessor may
# change the parameters w.r.t. which sensitivities are required.
# the leading zeros in ``max(0, 0, *sensi_orders)`` keep max() valid
# when sensi_orders is empty
if max(0, 0, *sensi_orders) > 0 and (
missing_sensitivities := self._get_missing_sensitivities()
):
raise ValueError(
f"Requested sensitivities w.r.t. parameters that can't "
f"be computed by the current model: {missing_sensitivities}."
)

x_dct = self.par_arr_to_dct(x)

# only ask amici to compute required quantities
Expand Down
3 changes: 3 additions & 0 deletions pypesto/petab/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -447,6 +447,9 @@ def create_objective(
Returns
-------
A :class:`pypesto.objective.AmiciObjective` for the model and the data.
This object is expected to be passed to
:class:`PetabImporter.create_problem` to correctly handle fixed
parameters.
Comment on lines +450 to +452
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we have a test to check the expected behavior if one does not pass it to PetabImporter.create_problem?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem is that we don't know the expected behaviour if one does not pass it to PetabImporter.create_problem

"""
# get simulation conditions
simulation_conditions = petab.get_simulation_conditions(
Expand Down
23 changes: 16 additions & 7 deletions test/petab/test_petab_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,16 +167,19 @@ def test_max_sensi_order():
"""Test that the AMICI objective created via PEtab exposes derivatives
correctly."""
model_name = "Boehm_JProteomeRes2014"
problem = pypesto.petab.PetabImporter.from_yaml(
importer = pypesto.petab.PetabImporter.from_yaml(
os.path.join(models.MODELS_DIR, model_name, model_name + ".yaml")
)
problem = importer.create_problem()

# define test parameter
par = problem.petab_problem.x_nominal_scaled
npar = len(par)
par = np.asarray(importer.petab_problem.x_nominal_scaled)[
problem.x_free_indices
]
npar = problem.dim

# auto-computed max_sensi_order and fim_for_hess
objective = problem.create_objective()
objective = problem.objective
hess = objective(par, sensi_orders=(2,))
assert hess.shape == (npar, npar)
assert (hess != 0).any()
Expand All @@ -190,18 +193,24 @@ def test_max_sensi_order():
)

# fix max_sensi_order to 1
objective = problem.create_objective(max_sensi_order=1)
objective = importer.create_problem(
objective=importer.create_objective(max_sensi_order=1)
).objective
Comment on lines +196 to +198
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This might be a minor thing, but i find this a really unintuitive way of creating the objective function correctly.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem is, that petab fixed parameters are only handled in create_problem, but not in create_objective. Maybe this could/should be changed. Not completely sure what might rely on the current behaviour.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

well both the importer and the problem should be aware of fixes parameters, so I agree that one should naturally expect problem.create_objective, importer.create_objective and importer.create_problem().objective to be equivalent and that passing importer.create_objective(max_sensi_order=1) to importer.create_problem isn't necessary.

objective(par, sensi_orders=(1,))
with pytest.raises(ValueError):
objective(par, sensi_orders=(2,))

# do not use FIM
objective = problem.create_objective(fim_for_hess=False)
objective = importer.create_problem(
objective=importer.create_objective(fim_for_hess=False)
).objective
with pytest.raises(ValueError):
objective(par, sensi_orders=(2,))

# only allow computing function values
objective = problem.create_objective(max_sensi_order=0)
objective = importer.create_problem(
objective=importer.create_objective(max_sensi_order=0)
).objective
objective(par)
with pytest.raises(ValueError):
objective(par, sensi_orders=(1,))
Expand Down
Loading