-
Notifications
You must be signed in to change notification settings - Fork 47
Expand file tree
/
Copy pathtest_amici_objective.py
More file actions
167 lines (141 loc) · 4.98 KB
/
test_amici_objective.py
File metadata and controls
167 lines (141 loc) · 4.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
"""
This is for testing the pypesto.Objective.
"""
import os
import amici
import benchmark_models_petab as models
import numpy as np
import petab.v1 as petab
import pytest
import pypesto
import pypesto.optimize as optimize
import pypesto.petab
from pypesto import C
from pypesto.objective.amici.amici_util import add_sim_grad_to_opt_grad
ATOL = 1e-1
RTOL = 1e-0
def test_add_sim_grad_to_opt_grad():
    """
    Test gradient mapping/summation works as expected.
    17 = 1 + 2*5 + 2*3
    """
    opt_ids = ["opt_par_1", "opt_par_2", "opt_par_3"]
    sim_ids = ["sim_par_1", "sim_par_2", "sim_par_3"]
    # Two simulation parameters map onto opt_par_3; nothing maps to opt_par_2.
    mapping = {
        "sim_par_1": "opt_par_1",
        "sim_par_2": "opt_par_3",
        "sim_par_3": "opt_par_3",
    }

    gradient_sim = np.asarray([1.0, 3.0, 5.0])
    gradient_opt = np.asarray([1.0, 1.0, 1.0])

    add_sim_grad_to_opt_grad(
        opt_ids,
        sim_ids,
        mapping,
        gradient_sim,
        gradient_opt,
        coefficient=2.0,
    )

    # opt_par_1: 1 + 2*1 = 3; opt_par_2: untouched; opt_par_3: 1 + 2*3 + 2*5 = 17
    assert np.allclose(np.asarray([3.0, 1.0, 17.0]), gradient_opt)
@pytest.mark.flaky(reruns=2)
def test_error_leastsquares_with_ssigma():
    """
    Test that least-squares optimization of a problem with estimated
    sigmas (ssigma) fails with a ``RuntimeError`` when failed starts
    are not allowed.
    """
    model_name = "Zheng_PNAS2012"
    petab_problem = petab.Problem.from_yaml(
        os.path.join(models.MODELS_DIR, model_name, model_name + ".yaml")
    )
    petab_problem.model_name = model_name
    importer = pypesto.petab.PetabImporter(petab_problem)
    obj = importer.create_objective()
    # Reject bad start points so the optimizer starts from a valid point.
    problem = importer.create_problem(
        obj, startpoint_kwargs={"check_fval": True, "check_grad": True}
    )
    # Use the file-wide `optimize` alias (consistent with the other tests)
    # instead of the fully qualified `pypesto.optimize` path.
    optimizer = optimize.ScipyOptimizer("ls_trf", options={"max_nfev": 50})
    with pytest.raises(RuntimeError):
        optimize.minimize(
            problem=problem,
            optimizer=optimizer,
            n_starts=1,
            # The failing start must propagate instead of being swallowed.
            options=optimize.OptimizeOptions(allow_failed_starts=False),
            progress_bar=False,
        )
@pytest.mark.flaky(reruns=5)
def test_preeq_guesses():
    """
    Test whether optimization with preequilibration guesses works, asserts
    that steadystate guesses are written and checks that gradient is still
    correct with guesses set.
    """
    model_name = "Brannmark_JBC2010"
    yaml_path = os.path.join(
        models.MODELS_DIR, model_name, model_name + ".yaml"
    )
    importer = pypesto.petab.PetabImporter.from_yaml(yaml_path)
    problem = importer.create_problem()
    objective = problem.objective

    # Disable Newton steps and use tight integration tolerances, with
    # sensitivities computed via integration for the steady state.
    objective.amici_solver.setNewtonMaxSteps(0)
    objective.amici_model.setSteadyStateSensitivityMode(
        amici.SteadyStateSensitivityMode.integrationOnly
    )
    objective.amici_solver.setAbsoluteTolerance(1e-12)
    objective.amici_solver.setRelativeTolerance(1e-12)

    # assert that initial guess is uninformative
    assert objective.steadystate_guesses["fval"] == np.inf

    problem.startpoint_method = pypesto.startpoint.UniformStartpoints(
        check_fval=False
    )
    result = optimize.minimize(
        problem=problem,
        optimizer=optimize.ScipyOptimizer(),
        n_starts=1,
        progress_bar=False,
    )

    # Guesses must have been written during optimization, one per edata.
    assert objective.steadystate_guesses["fval"] < np.inf
    assert len(objective.steadystate_guesses["data"]) == len(objective.edatas)

    # check that we test a problem where plist is nontrivial
    assert any(
        len(edata.plist) != len(edata.parameters)
        for edata in objective.edatas
    )

    # Gradient check at the best found point, with guesses in place.
    x_best = problem.get_reduced_vector(
        result.optimize_result.list[0]["x"], problem.x_free_indices
    )
    grad_check = objective.check_grad(
        x_best,
        eps=1e-3,
        verbosity=0,
        mode=C.MODE_FUN,
    )
    print("relative errors MODE_FUN: ", grad_check.rel_err.values)
    print("absolute errors MODE_FUN: ", grad_check.abs_err.values)
    assert np.all(
        (grad_check.rel_err.values < RTOL) | (grad_check.abs_err.values < ATOL)
    )

    # assert that resetting works
    problem.objective.initialize()
    assert objective.steadystate_guesses["fval"] == np.inf
def test_edatas():
    """
    Test that objectives created from a subset of edatas are consistent
    with the full objective.

    For each experimental condition, the parameter mapping of the
    single-condition objective must equal the corresponding mapping of
    the full objective, and the single-condition function value must
    equal the summed negative log-likelihood contribution of the
    matching condition(s) in the full result.
    """
    model_name = "Brannmark_JBC2010"
    importer = pypesto.petab.PetabImporter.from_yaml(
        os.path.join(models.MODELS_DIR, model_name, model_name + ".yaml")
    )
    pars = np.asarray(importer.petab_problem.x_nominal_free_scaled)
    full_objective = importer.create_objective()
    edatas = importer.create_edatas()
    full_result = full_objective(pars, return_dict=True)
    for pm_full, edata in zip(
        full_objective.parameter_mapping.parameter_mappings,
        edatas,
        strict=True,
    ):
        # Objective restricted to a single experimental condition.
        sub_objective = importer.create_objective(edatas=[edata])
        pm_sub = sub_objective.parameter_mapping.parameter_mappings[0]
        # The restricted mapping must agree with the full one,
        # attribute by attribute.
        for var, val in vars(pm_full).items():
            assert getattr(pm_sub, var) == val, var
        sub_result = sub_objective(pars, return_dict=True)
        # The sub-objective fval equals the -llh of the rdata(s) with
        # the same condition id in the full result.
        assert sub_result[C.FVAL] == sum(
            -rdata["llh"]
            for rdata in full_result[C.RDATAS]
            if rdata.id == sub_result[C.RDATAS][0].id
        )