Skip to content

Commit cfeb505

Browse files
committed
fix plist
1 parent f7ed165 commit cfeb505

File tree

2 files changed

+13
-10
lines changed

2 files changed

+13
-10
lines changed

python/sdist/amici/petab/petab_importer.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -747,16 +747,18 @@ def apply_parameters(
747747
experiment_id = edata.id
748748
experiment = self._petab_problem[experiment_id]
749749

750-
# TODO set plist -- estimated parameters + those mapped via placeholders
750+
# plist -- estimated parameters + those mapped via placeholders
751+
# TODO sufficient to set them during creation of edata or allow dynamic fixing of parameters?
752+
# store list of sensitivity parameters in class instead of using x_free_ids or estimate=True
751753
plist = []
752754
placeholder_mappings = self._get_placeholder_mapping(experiment)
753755
estimated_par_ids = self._petab_problem.x_free_ids
754756
for model_par_idx, model_par_id in enumerate(
755757
self._model.get_parameter_ids()
756758
):
757-
if (
758-
model_par_id in estimated_par_ids
759-
or model_par_id in placeholder_mappings
759+
if model_par_id in estimated_par_ids or (
760+
(maps_to := placeholder_mappings.get(model_par_id)) is not None
761+
and maps_to in estimated_par_ids
760762
):
761763
plist.append(model_par_idx)
762764
edata.plist = plist

tests/benchmark_models/test_petab_benchmark.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -738,22 +738,23 @@ def test_nominal_parameters_llh_v2(problem_id):
738738
# check llh
739739
compare_to_reference(problem_id, llh)
740740

741+
# check gradient
742+
if problem_id not in problems_for_gradient_check:
743+
return None
744+
# pytest.skip("Excluded from gradient check.")
745+
741746
# sensitivities computed w.r.t. the expected parameters? (`plist` correct?)
742-
# TODO: allow fixing parameters?
743747
ps._solver.set_sensitivity_order(SensitivityOrder.first)
744748
ps._solver.set_sensitivity_method(SensitivityMethod.forward)
749+
ps._model.set_always_check_finite(True)
745750
result = ps.simulate(
746751
problem_parameters=problem_parameters,
747752
)
753+
assert result[SLLH] is not None
748754
actual_sens_pars = set(result[SLLH].keys())
749755
expected_sens_pars = set(problem.x_free_ids)
750756
assert actual_sens_pars == expected_sens_pars
751757

752-
# check gradient
753-
if problem_id not in problems_for_gradient_check:
754-
return None
755-
# pytest.skip("Excluded from gradient check.")
756-
757758
# TODO
758759
scale = False
759760

0 commit comments

Comments (0)