Skip to content

Commit 2ab8625

Browse files
committed
inverse design seminar notebooks
1 parent bb848a9 commit 2ab8625

14 files changed

+4497
-2
lines changed

2025-10-09-invdes-seminar/00_setup_guide.ipynb

Lines changed: 344 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/01_bayes.ipynb

Lines changed: 556 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/02_adjoint.ipynb

Lines changed: 523 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/03_sensitivity.ipynb

Lines changed: 1032 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/04_adjoint_robust.ipynb

Lines changed: 441 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/05_robust_comparison.ipynb

Lines changed: 427 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/06_measurement_calibration.ipynb

Lines changed: 474 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

2025-10-09-invdes-seminar/optim.py

Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
"""Utility routines for functional-style optimization in the tutorial notebooks.
2+
3+
The helpers here avoid mutating inputs so they play nicely with autograd.
4+
"""
5+
6+
import autograd.numpy as np
7+
from autograd.misc import flatten
8+
9+
10+
def clip_params(params, bounds):
    """Return a copy of *params* with each array clipped to its bounds.

    Parameters
    ----------
    params : dict[str, np.ndarray]
        Mapping from parameter names to array values.
    bounds : dict[str, tuple[float | None, float | None]]
        ``(lower, upper)`` limits per key.  Keys absent from ``bounds``
        are left unclipped, and ``None`` disables that side of the bound.

    Returns
    -------
    dict[str, np.ndarray]
        Fresh dictionary with clipped values; the input is not mutated.
    """
    out = {}
    for name, value in params.items():
        lower, upper = bounds.get(name, (None, None))
        out[name] = np.clip(
            value,
            -np.inf if lower is None else lower,
            np.inf if upper is None else upper,
        )
    return out
33+
34+
35+
def _flatten(tree):
    """Flatten *tree* into a 1-D float array, returning it together with
    the function that restores the original pytree structure."""
    leaves, unflatten = flatten(tree)
    return np.array(leaves, dtype=float), unflatten
39+
40+
41+
def init_adam(params, lr=1e-2, beta1=0.9, beta2=0.999, eps=1e-8):
    """Create fresh Adam optimizer state sized to match *params*.

    Parameters
    ----------
    params : dict[str, np.ndarray]
        Parameter pytree; only its flattened size and structure are used.
    lr : float = 1e-2
        Step size applied to every update.
    beta1 : float = 0.9
        Decay rate of the first-moment (mean) estimate.
    beta2 : float = 0.999
        Decay rate of the second-moment (uncentered variance) estimate.
    eps : float = 1e-8
        Small constant guarding the square-root denominator.

    Returns
    -------
    dict[str, object]
        State holding a zeroed step counter and moment vectors, the
        ``unflatten`` closure for this pytree, and the hyperparameters.
    """
    flat, unflatten = _flatten(params)
    return {
        "t": 0,
        "m": np.zeros_like(flat),
        "v": np.zeros_like(flat),
        "unflatten": unflatten,
        "lr": lr,
        "beta1": beta1,
        "beta2": beta2,
        "eps": eps,
    }
74+
75+
76+
def adam_update(grads, state):
    """Turn a gradient pytree into Adam parameter deltas.

    Parameters
    ----------
    grads : dict[str, np.ndarray]
        Gradient pytree with the same structure as the parameters.
    state : dict[str, object]
        Optimizer state returned by :func:`init_adam`.

    Returns
    -------
    updates : dict[str, np.ndarray]
        Parameter deltas that should be subtracted from the current values.
    new_state : dict[str, object]
        Copy of ``state`` with advanced step count and moment estimates;
        the input state is left untouched.
    """
    grad_flat, _ = _flatten(grads)
    step = state["t"] + 1
    b1 = state["beta1"]
    b2 = state["beta2"]

    # Exponential moving averages of the gradient and its element-wise square.
    first = b1 * state["m"] + (1 - b1) * grad_flat
    second = b2 * state["v"] + (1 - b2) * (grad_flat * grad_flat)

    # Bias correction compensates for the zero-initialized moments.
    first_hat = first / (1 - b1**step)
    second_hat = second / (1 - b2**step)
    delta_flat = state["lr"] * (first_hat / (np.sqrt(second_hat) + state["eps"]))

    next_state = dict(state, t=step, m=first, v=second)
    return state["unflatten"](delta_flat), next_state
113+
114+
115+
def apply_updates(params, updates):
    """Subtract *updates* from *params*, returning a new pytree.

    Parameters
    ----------
    params : dict[str, np.ndarray]
        Original parameter dictionary.
    updates : dict[str, np.ndarray]
        Update dictionary produced by :func:`adam_update`.

    Returns
    -------
    dict[str, np.ndarray]
        New dictionary equal to ``params - updates`` element-wise; neither
        input is modified.
    """
    flat_params, rebuild = _flatten(params)
    flat_updates, _ = _flatten(updates)
    return rebuild(flat_params - flat_updates)
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
{
2+
"widths_si": [
3+
0.48262618509355615,
4+
0.5207667397076212,
5+
0.45559630741287455,
6+
0.46361599591364383,
7+
0.44415937806339206,
8+
0.4725359297284237,
9+
0.4897650205171269,
10+
0.43401055421044743,
11+
0.5470685569556949,
12+
0.36090417995022805,
13+
0.35112952011499815,
14+
0.25182851621035,
15+
0.2287991538613288,
16+
0.21772272282716135,
17+
0.5871457399636976
18+
],
19+
"gaps_si": [
20+
0.6608377498745214,
21+
0.7153002966535659,
22+
0.6755416250853287,
23+
0.762711911245917,
24+
0.6957603543580327,
25+
0.6485980725930465,
26+
0.7270242877193821,
27+
0.6569877864900205,
28+
0.7434394276954258,
29+
0.8910689853995577,
30+
0.92010444487145,
31+
0.887662287039533,
32+
0.8439724990649012,
33+
0.7880932609023489,
34+
0.7992416233438039
35+
],
36+
"widths_sin": [
37+
0.7891411537966333,
38+
0.6441362131696193,
39+
0.5221408734233975,
40+
0.31370712049190075,
41+
0.6036396259080945,
42+
0.5709134822507435,
43+
0.6102929883304251,
44+
0.5666814968867978,
45+
0.5911545201167835,
46+
0.5511240455381605,
47+
0.6759490391650566,
48+
0.424347404772533,
49+
0.4917036091769178,
50+
0.5990651442832533,
51+
0.7041841301345496
52+
],
53+
"gaps_sin": [
54+
0.4541255482246594,
55+
0.4802605152344745,
56+
0.3,
57+
0.4914339475589058,
58+
0.5480930702315364,
59+
0.6026168524939672,
60+
0.6561924161853298,
61+
0.5934806415337143,
62+
0.478494886109227,
63+
0.44772190354423175,
64+
0.7331937769153588,
65+
0.6299485623886972,
66+
0.48849470041329063,
67+
0.35636407607194925,
68+
0.5135103145142313
69+
],
70+
"first_gap_si": -0.6720330444742626,
71+
"first_gap_sin": 0.5035568088634116,
72+
"target_power": 0.5676497430872463
73+
}
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
{
2+
"widths_si": [
3+
0.4920388608057116,
4+
0.5269663382801457,
5+
0.47051077631906724,
6+
0.45640313960745277,
7+
0.4463409120598345,
8+
0.479710427467526,
9+
0.4926663437517315,
10+
0.44457287894940967,
11+
0.545014746020923,
12+
0.36598776983110565,
13+
0.3329813653191155,
14+
0.2468853437833761,
15+
0.23766189386793363,
16+
0.23602095263192163,
17+
0.5993116787228173
18+
],
19+
"gaps_si": [
20+
0.6594384762369605,
21+
0.7075580062177993,
22+
0.6811015769993951,
23+
0.7457468608769762,
24+
0.6946555894542846,
25+
0.6444157147446795,
26+
0.7121732105630795,
27+
0.6468550006640721,
28+
0.7334476826411184,
29+
0.8848483116592654,
30+
0.926354310270442,
31+
0.9035257883720766,
32+
0.8629975364993915,
33+
0.8031133570655837,
34+
0.7992416233438039
35+
],
36+
"widths_sin": [
37+
0.8194008890879501,
38+
0.654976203326419,
39+
0.5154062669153403,
40+
0.28937104202011993,
41+
0.5797843891332557,
42+
0.5547076037062613,
43+
0.6077917250899648,
44+
0.5628056343171124,
45+
0.5933617318869786,
46+
0.548751792878745,
47+
0.6887370577523546,
48+
0.41013832947358364,
49+
0.4913432960091517,
50+
0.6137520507037625,
51+
0.7155681547704924
52+
],
53+
"gaps_sin": [
54+
0.45753036557675575,
55+
0.5098708096717812,
56+
0.3,
57+
0.49309132198664574,
58+
0.5499656317615206,
59+
0.6082880193683878,
60+
0.6647065648783161,
61+
0.6049556315727266,
62+
0.48280853554230346,
63+
0.45039397074013265,
64+
0.7420847046346506,
65+
0.6328210425726839,
66+
0.5020955912266114,
67+
0.3587164271526588,
68+
0.5135103145142313
69+
],
70+
"first_gap_si": -0.6811767256145679,
71+
"first_gap_sin": 0.4802966569812285,
72+
"etch_bias_modeled": 0.02
73+
}

0 commit comments

Comments
 (0)