Skip to content
Open
Changes from 1 commit
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
2844fdd
Add LASSO without structure to LinearRegression notebook
Blunde1 Dec 27, 2023
b0552e0
Transport observation samples, not deterministic responses
Blunde1 Jan 3, 2024
36c1f5c
Add specified observation noise, not just standard normal
Blunde1 Jan 3, 2024
c71b6c6
Run black
Blunde1 Jan 3, 2024
6f130a6
Run ruff
Blunde1 Jan 3, 2024
40b1e0e
Copy linear_l1_regression from graphite-maps
Blunde1 Jan 3, 2024
d2810b1
Rename parameters in linear_l1_regression for easier understanding
Blunde1 Jan 3, 2024
6a3b199
add sklearn and tqdm to docs
Blunde1 Jan 3, 2024
1474d54
Tidy up text in notebook
Blunde1 Jan 5, 2024
6310ad2
First draft and most important thoughts on objective in kalman-type m…
Blunde1 Jan 12, 2024
3fbc8eb
Strong opinions on language for perturbations added
Blunde1 Jan 15, 2024
7da4ea2
Be more consistent in mathematical notation
Blunde1 Jan 15, 2024
2f5ce09
Strong opinions on nll vs ls objectives even when equivalent estimators
Blunde1 Jan 15, 2024
b3c446a
TLDR and more strong opinions
Blunde1 Jan 17, 2024
e68a30c
Add explicit how to evaluate different methods
Blunde1 Jan 17, 2024
9614d63
Test to see if github renders block-level math
Blunde1 Jan 17, 2024
f042a0c
add newlines to make github recognize block-math format
Blunde1 Jan 17, 2024
a35aa19
Format equations in minimize model information-loss
Blunde1 Jan 17, 2024
a5a89b3
Use ^\ast instead of ^*
Blunde1 Jan 17, 2024
44fd48d
Brackets to clearly separate expressions
Blunde1 Jan 17, 2024
5d7ee69
Replace dollarsigns with mathblocks some places
Blunde1 Jan 17, 2024
50a4ced
Use github formatting for inline math
Blunde1 Jan 17, 2024
941c529
Comments on lasso without structure
Blunde1 Jan 17, 2024
5110a98
Comments on human understandable for enif
Blunde1 Jan 17, 2024
6b159e7
Make sure we condition on the correct d
Blunde1 Jan 17, 2024
a47ff3d
Be very clear on evaluation on test-data
Blunde1 Jan 17, 2024
d408c49
Fix typo
Blunde1 Jan 18, 2024
bc86d68
follows --> follow
Blunde1 Jan 22, 2024
e3142da
is -> are
Blunde1 Jan 22, 2024
3b99956
Fix typo "empirical"
Blunde1 Jan 22, 2024
d365049
Fix typo works->work
Blunde1 Jan 22, 2024
5775872
Fix typo yields->yield
Blunde1 Jan 22, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 45 additions & 4 deletions docs/source/LinearRegression.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.15.2
# jupytext_version: 1.16.0
# kernelspec:
# display_name: Python 3 (ipykernel)
# language: python
Expand Down Expand Up @@ -50,8 +50,9 @@
# %%
# Experiment configuration: problem dimensions, ensemble size, and noise level.
num_parameters = 25
num_observations = 100
num_ensemble = 50
prior_std = 1
obs_sd = 1.0  # standard deviation of the observation noise (PEP 8 spacing)

# %%
# Seeded generator so every run of the notebook is reproducible.
rng = np.random.default_rng(42)
Expand All @@ -67,14 +68,14 @@ def g(X):

# Create observations: obs = g(x) + N(0, 1)
x_true = np.linspace(-1, 1, num=num_parameters)
observation_noise = rng.standard_normal(size=num_observations)
observation_noise = obs_sd * rng.standard_normal(size=num_observations)
observations = g(x_true) + observation_noise

# Initial ensemble X ~ N(0, prior_std) and diagonal covariance with ones
X = rng.normal(size=(num_parameters, num_ensemble)) * prior_std

# Covariance matches the noise added to observations above
covariance = np.ones(num_observations)
covariance = np.ones(num_observations) * obs_sd**2

# %% [markdown]
# ## Solve the maximum likelihood problem
Expand Down Expand Up @@ -170,3 +171,43 @@ def g(X):
# Light dashed grid behind the data (zorder=0 keeps it under the scatter points).
plt.grid(True, ls="--", zorder=0, alpha=0.33)
plt.legend()
plt.show()

# %%
# NOTE(review): `sp` appears unused in the visible cells — confirm later cells need it.
import scipy.sparse as sp
from graphite_maps.linear_regression import linear_l1_regression

# %%
# Ensemble-smoother style update where the gain K is learned with
# L1-regularized (LASSO) linear regression, mapping perturbed responses D
# back to the prior parameters.
X_prior = np.copy(X)
Y = g(X_prior)  # responses of the prior ensemble, one column per member
# Scale the perturbation by obs_sd so the observation samples D are consistent
# with `covariance = ones * obs_sd**2` above (identical behavior at obs_sd=1.0).
D = Y + obs_sd * rng.standard_normal(size=Y.shape)
K = linear_l1_regression(U=D.T, Y=X_prior.T)
# NOTE(review): the innovation uses the deterministic responses Y, while the
# perturbed samples D only train K — confirm this matches the intended scheme.
X_posterior = X_prior + K @ (observations - Y.T).T

# %%
# Visualize the truth, the ML estimate, the posterior mean, and every
# posterior ensemble member on a shared parameter-index axis.
param_idx = np.arange(len(x_true))

plt.figure(figsize=(8, 3))
plt.scatter(param_idx, x_true, label="True parameter values")
plt.scatter(param_idx, x_ml, label="ML estimate (no prior)")
plt.scatter(param_idx, np.mean(X_posterior, axis=1), label="Posterior mean")

# Scatter each ensemble member with a little x-jitter so overlapping points
# remain visible; only the first member gets a legend entry.
for member in range(num_ensemble):
    jittered_idx = param_idx + rng.normal(loc=0, scale=0.1, size=len(x_true))
    plt.scatter(
        jittered_idx,
        X_posterior[:, member],
        label="Posterior values" if member == 0 else None,
        color="black",
        alpha=0.2,
        s=5,
        zorder=0,
    )

plt.xlabel("Parameter index")
plt.ylabel("Parameter value")
plt.grid(True, ls="--", zorder=0, alpha=0.33)
plt.legend()
plt.show()