Commit 91ba4dc

Fix flatten_timepoint_specific_output_overrides not supporting observableParameter overrides as placeholders in noise formulae (#235)
Fixes #234. Also fixes the return type annotation of unflatten_simulation_df, makes the test more readable, and uses more informative assertions.
1 parent: 4e6a018 · commit: 91ba4dc

2 files changed: +49 −50 lines
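
The case addressed here is a noise formula that uses observableParameter placeholders, e.g. a proportional error model as in the updated test. A minimal sketch of such an observable table (constants from petab.C; values mirror the test case below):

import pandas as pd
from petab.C import NOISE_FORMULA, OBSERVABLE_FORMULA, OBSERVABLE_ID

# Proportional error model: the noise formula reuses the observable's
# observableParameter placeholders. Before this commit,
# flatten_timepoint_specific_output_overrides rewrote those placeholders only
# in OBSERVABLE_FORMULA, so the flattened NOISE_FORMULA kept the old names.
observable_df = pd.DataFrame(
    {
        OBSERVABLE_ID: ["obs1"],
        OBSERVABLE_FORMULA: [
            "observableParameter1_obs1 + observableParameter2_obs1"
        ],
        NOISE_FORMULA: [
            "(observableParameter1_obs1 + observableParameter2_obs1)"
            " * noiseParameter1_obs1"
        ],
    }
).set_index(OBSERVABLE_ID)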

petab/core.py

Lines changed: 17 additions & 18 deletions
@@ -71,7 +71,7 @@ def write_simulation_df(df: pd.DataFrame, filename: Union[str, Path]) -> None:
 
 
 def get_visualization_df(
-    visualization_file: Union[str, Path, pd.DataFrame, None]
+    visualization_file: Union[str, Path, pd.DataFrame, None]
 ) -> Union[pd.DataFrame, None]:
     """Read PEtab visualization table
 
@@ -254,7 +254,7 @@ def flatten_timepoint_specific_output_overrides(
 
     Arguments:
         petab_problem:
-            PEtab problem to work on
+            PEtab problem to work on. Modified in place.
     """
     new_measurement_dfs = []
     new_observable_dfs = []
@@ -277,22 +277,21 @@ def flatten_timepoint_specific_output_overrides(
             for field, hyperparameter_type, target in [
                 (NOISE_PARAMETERS, "noiseParameter", NOISE_FORMULA),
                 (OBSERVABLE_PARAMETERS, "observableParameter", OBSERVABLE_FORMULA),
+                (OBSERVABLE_PARAMETERS, "observableParameter", NOISE_FORMULA),
             ]:
-                if field in measurements:
-                    hyperparameter_replacement_id = (
-                        get_hyperparameter_replacement_id(
-                            hyperparameter_type=hyperparameter_type,
-                            observable_replacement_id=observable_replacement_id,
-                        )
-                    )
-                    hyperparameter_id = mappings[field][
-                        hyperparameter_replacement_id
-                    ]
-                    observable[target] = re.sub(
-                        hyperparameter_id,
-                        hyperparameter_replacement_id,
-                        observable[target],
-                    )
+                if field not in measurements:
+                    continue
+
+                hyperparameter_replacement_id = get_hyperparameter_replacement_id(
+                    hyperparameter_type=hyperparameter_type,
+                    observable_replacement_id=observable_replacement_id,
+                )
+                hyperparameter_id = mappings[field][hyperparameter_replacement_id]
+                observable[target] = re.sub(
+                    hyperparameter_id,
+                    hyperparameter_replacement_id,
+                    observable[target],
+                )
 
             measurements[OBSERVABLE_ID] = observable_replacement_id
             new_measurement_dfs.append(measurements)
@@ -306,7 +305,7 @@ def flatten_timepoint_specific_output_overrides(
 
 def unflatten_simulation_df(
     simulation_df: pd.DataFrame,
     petab_problem: "petab.problem.Problem",
-) -> None:
+) -> pd.DataFrame:
     """Unflatten simulations from a flattened PEtab problem.
 
     A flattened PEtab problem is the output of applying
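
The functional change above is the added (OBSERVABLE_PARAMETERS, "observableParameter", NOISE_FORMULA) loop entry: the re.sub placeholder rewriting that already ran over OBSERVABLE_FORMULA is now also applied to NOISE_FORMULA. A standalone sketch of that substitution, with a hand-written pattern and replacement standing in for what the flattening mappings and get_hyperparameter_replacement_id provide:

import re

# Illustration only: the real pattern/replacement come from petab's mappings;
# these values correspond to the first flattened observable in the test.
observable_replacement_id = "obs1__obsParOverride1_1_0__noiseParOverride1__condition1"
pattern = r"observableParameter(\d+)_obs1"
replacement = rf"observableParameter\1_{observable_replacement_id}"

noise_formula = (
    "(observableParameter1_obs1 + observableParameter2_obs1) * noiseParameter1_obs1"
)

# With the new loop entry, the observableParameter placeholders in the noise
# formula now point at the flattened observable as well.
print(re.sub(pattern, replacement, noise_formula))
# (observableParameter1_obs1__obsParOverride1_1_0__noiseParOverride1__condition1
#  + observableParameter2_obs1__obsParOverride1_1_0__noiseParOverride1__condition1)
#  * noiseParameter1_obs1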

tests/test_petab.py

Lines changed: 32 additions & 32 deletions
@@ -353,36 +353,32 @@ def test_flatten_timepoint_specific_output_overrides():
             OBSERVABLE_FORMULA: [
                 "observableParameter1_obs1 + observableParameter2_obs1"
             ],
-            NOISE_FORMULA: ["noiseParameter1_obs1"],
+            NOISE_FORMULA: [
+                "(observableParameter1_obs1 + observableParameter2_obs1) * noiseParameter1_obs1"
+            ],
         }
     )
     observable_df.set_index(OBSERVABLE_ID, inplace=True)
 
+    # new observable IDs (obs${i_obs}_${i_obsParOverride}_${i_noiseParOverride}_${i_condition})
+    obs1_1_1_1 = "obs1__obsParOverride1_1_0__noiseParOverride1__condition1"
+    obs1_2_1_1 = "obs1__obsParOverride2_1_0__noiseParOverride1__condition1"
+    obs1_2_2_1 = "obs1__obsParOverride2_1_0__noiseParOverride2__condition1"
     observable_df_expected = pd.DataFrame(
         data={
-            OBSERVABLE_ID: [
-                "obs1__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-            ],
+            OBSERVABLE_ID: [obs1_1_1_1, obs1_2_1_1, obs1_2_2_1],
             OBSERVABLE_FORMULA: [
-                "observableParameter1_obs1__obsParOverride1_1_0__"
-                "noiseParOverride1__condition1 + observableParameter2_obs1"
-                "__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "observableParameter1_obs1__obsParOverride2_1_0__noiseParOverride1"
-                "__condition1 + observableParameter2_obs1__obsParOverride2_1_0"
-                "__noiseParOverride1__condition1",
-                "observableParameter1_obs1__obsParOverride2_1_0"
-                "__noiseParOverride2__condition1 + observableParameter2_obs1__"
-                "obsParOverride2_1_0__noiseParOverride2__condition1",
+                f"observableParameter1_{obs1_1_1_1} + observableParameter2_{obs1_1_1_1}",
+                f"observableParameter1_{obs1_2_1_1} + observableParameter2_{obs1_2_1_1}",
+                f"observableParameter1_{obs1_2_2_1} + observableParameter2_{obs1_2_2_1}",
             ],
             NOISE_FORMULA: [
-                "noiseParameter1_obs1__obsParOverride1_1_0__"
-                "noiseParOverride1__condition1",
-                "noiseParameter1_obs1__obsParOverride2_1_0__"
-                "noiseParOverride1__condition1",
-                "noiseParameter1_obs1__obsParOverride2_1_0__"
-                "noiseParOverride2__condition1",
+                f"(observableParameter1_{obs1_1_1_1} + observableParameter2_{obs1_1_1_1})"
+                f" * noiseParameter1_{obs1_1_1_1}",
+                f"(observableParameter1_{obs1_2_1_1} + observableParameter2_{obs1_2_1_1})"
+                f" * noiseParameter1_{obs1_2_1_1}",
+                f"(observableParameter1_{obs1_2_2_1} + observableParameter2_{obs1_2_2_1})"
+                f" * noiseParameter1_{obs1_2_2_1}",
             ],
         }
     )
@@ -418,12 +414,7 @@ def test_flatten_timepoint_specific_output_overrides():
 
     measurement_df_expected = pd.DataFrame(
         data={
-            OBSERVABLE_ID: [
-                "obs1__obsParOverride1_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride1__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-                "obs1__obsParOverride2_1_0__noiseParOverride2__condition1",
-            ],
+            OBSERVABLE_ID: [obs1_1_1_1, obs1_2_1_1, obs1_2_2_1, obs1_2_2_1],
             SIMULATION_CONDITION_ID: [
                 "condition1",
                 "condition1",
@@ -472,8 +463,12 @@ def test_flatten_timepoint_specific_output_overrides():
         is False
     )
 
-    assert problem.observable_df.equals(observable_df_expected) is True
-    assert problem.measurement_df.equals(measurement_df_expected) is True
+    pd.testing.assert_frame_equal(
+        problem.observable_df, observable_df_expected
+    )
+    pd.testing.assert_frame_equal(
+        problem.measurement_df, measurement_df_expected
+    )
 
     assert petab.lint_problem(problem) is False
 
@@ -591,8 +586,12 @@ def test_flatten_timepoint_specific_output_overrides_special_cases():
         is False
     )
 
-    assert problem.observable_df.equals(observable_df_expected) is True
-    assert problem.measurement_df.equals(measurement_df_expected) is True
+    pd.testing.assert_frame_equal(
+        problem.observable_df, observable_df_expected
+    )
+    pd.testing.assert_frame_equal(
+        problem.measurement_df, measurement_df_expected
+    )
 
     assert petab.lint_problem(problem) is False
 
@@ -842,13 +841,14 @@ def test_get_required_parameters_for_parameter_table(petab_problem):
     # as part of the proportional error model.
     assert "observableParameter1_obs1" in noise_placeholders
 
-    required_parameters_for_parameter_table = \
+    required_parameters_for_parameter_table = (
         petab.parameters.get_required_parameters_for_parameter_table(
             model=petab_problem.model,
            condition_df=petab_problem.condition_df,
            observable_df=petab_problem.observable_df,
            measurement_df=petab_problem.measurement_df,
        )
+    )
     # The observable parameter is correctly recognized as a placeholder,
     # i.e. does not need to be in the parameter table.
     assert (
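
Replacing the bare .equals(...) asserts with pd.testing.assert_frame_equal is what the commit message calls more informative assertions: on a mismatch it raises an AssertionError that names the differing column and values instead of a plain assertion failure. For example:

import pandas as pd

expected = pd.DataFrame({"a": [1, 2]})
actual = pd.DataFrame({"a": [1, 3]})

# Raises AssertionError reporting the differing column and values, rather
# than the bare failure produced by `assert actual.equals(expected) is True`.
pd.testing.assert_frame_equal(actual, expected)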
