Skip to content

Commit b94e9de

Browse files
committed
maps paper update
1 parent 3b2fade commit b94e9de

File tree

1 file changed

+16
-15
lines changed

1 file changed

+16
-15
lines changed

_bibliography/papers.bib

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,22 @@
11
---
22
---
33
4+
@article{tanguy2025constrained,
  abbr        = {COCV},
  author      = {Tanguy, Eloi and Desolneux, Agn\`es and Delon, Julie},
  journal     = {ESAIM: Control, Optimisation and Calculus of Variations},
  title       = {Constrained Approximate {Optimal Transport} Maps},
  year        = {2025},
  month       = aug,
  doi         = {10.1051/cocv/2025057},
  abstract    = {We investigate finding a map \(g\) within a function class \(G\) that minimises an Optimal Transport (OT) cost between a target measure \(\nu\) and the image by \(g\) of a source measure \(\mu\). This is relevant when an OT map from \(\mu\) to \(\nu\) does not exist or does not satisfy the desired constraints of \(G\). We address existence and uniqueness for generic subclasses of \(L\)-Lipschitz functions, including gradients of (strongly) convex functions and typical Neural Networks. We explore a variant that approaches a transport plan, showing equivalence to a map problem in some cases. For the squared Euclidean cost, we propose alternating minimisation over a transport plan \(\pi\) and map \(g\), with the optimisation over \(g\) being the \(L^2\) projection on \(G\) of the barycentric mapping \(\overline{\pi}\). In dimension one, this global problem equates the \(L^2\) projection of \(\overline{\pi^*}\) onto \(G\) for an OT plan \(\pi^*\) between \(\mu\) and \(\nu\), but this does not extend to higher dimensions. We introduce a simple kernel method to find \(g\) within a Reproducing Kernel Hilbert Space in the discrete case. Finally, we present numerical methods for \(L\)-Lipschitz gradients of \(\ell\)-strongly convex potentials, and study the convergence of Stochastic Gradient Descent methods for Neural Networks. We finish with an illustration on colour transfer, applying learned maps on new images, and showcasing outlier robustness.},
  pdf         = {constrained_maps.pdf},
  selected    = {true},
  arxiv       = {2407.13445},
  slides      = {constrained_maps_slides.pdf},
  bibtex_show = {true},
}
19+
420
@Article{tanguy2025sliced,
521
abbr={PREPRINT},
622
author = {Tanguy, Eloi and Chapel, Laetitia and Delon, Julie},
@@ -51,21 +67,6 @@ @Article{tanguy2024computing
5167
code={https://github.com/eloitanguy/ot_bar}
5268
}
5369

54-
@Article{tanguy2024constrained,
55-
abbr={PREPRINT},
56-
author = {Tanguy, Eloi and Desolneux, Agn\`es and Delon, Julie},
57-
journal = {arXiv preprint arXiv:2407.13445},
58-
title = {Constrained Approximate Optimal Transport Maps},
59-
year = {2024},
60-
month = {July},
61-
abstract={We investigate finding a map \(g\) within a function class \(G\ \) that minimises an Optimal Transport (OT) cost between a target measure \(\nu\ \) and the image by \(g\ \) of a source measure \(\mu\ \). This is relevant when an OT map from \(\mu\ \) to \(\nu\ \) does not exist or does not satisfy the desired constraints of \(G\). We address existence and uniqueness for generic subclasses of \(L\)-Lipschitz functions, including gradients of (strongly) convex functions and typical Neural Networks. We explore a variant that approaches a transport plan, showing equivalence to a map problem in some cases. For the squared Euclidean cost, we propose alternating minimisation over a transport plan \(\pi\ \) and map \(g\ \), with the optimisation over \(g\ \) being the \(L^2\ \) projection on \(G\ \) of the barycentric mapping \(\overline{\pi}\). In dimension one, this global problem equates the \(L^2\ \) projection of \(\overline{\pi^*}\ \) onto \(G\ \) for an OT plan \(\pi^*\ \) between \(\mu\ \) and \(\nu\ \), but this does not extend to higher dimensions. We introduce a simple kernel method to find $g$ within a Reproducing Kernel Hilbert Space in the discrete case. Finally, we present numerical methods for \(L\)-Lipschitz gradients of \(\ell\)-strongly convex potentials.},
62-
pdf={constrained_maps.pdf},
63-
selected={true},
64-
arxiv={2407.13445},
65-
slides={constrained_maps_slides.pdf},
66-
bibtex_show={true}
67-
}
68-
6970
@Article{tanguy2023discrete_sw_losses,
7071
abbr={MCOM},
7172
author = {Tanguy, Eloi and Flamary, R{\'e}mi and Delon, Julie},

0 commit comments

Comments
 (0)