r"""
Relaxed Mumford-Shah regularization
===================================
In this tutorial we use a relaxed Mumford-Shah (rMS) functional [1]_ as regularization, which has the following form:

.. math::
    \text{rMS}(x) = \min (\alpha\Vert x\Vert_2^2, \kappa).

Its proximal operator is given by

.. math::
    \text{prox}_{\text{rMS}}(x) =
    \begin{cases}
    \frac{1}{1+2\alpha}x & \text{if } \vert x\vert \leq \sqrt{\frac{\kappa}{\alpha}(1 + 2\alpha)} \\
    x & \text{otherwise}
    \end{cases},

where the threshold is the point at which the cost of the quadratic branch, :math:`\alpha x^2 / (1 + 2\alpha)`,
equals the constant penalty :math:`\kappa`.

rMS combines Tikhonov and TV regularization: below the threshold the quadratic (Tikhonov) penalty keeps the
solution smooth, while above it the constant penalty :math:`\kappa` allows the solution to jump.
We show three denoising examples: one that is well-suited for TV regularization and two where rMS
outperforms both TV and Tikhonov regularization, modeled after the experiments in [2]_.

**References**

.. [1] Strekalovskiy, E., and Cremers, D., "Real-time minimization of the piecewise smooth Mumford-Shah functional", European Conference on Computer Vision, 127-141, 2014.
.. [2] Kadu, A., Kumar, R., and van Leeuwen, T., "Full-waveform inversion with Mumford-Shah regularization", SEG International Exposition and Annual Meeting, SEG-2018-2997224, 2018.

"""
34+
import numpy as np
import matplotlib.pyplot as plt
import pylops

from pylops import FirstDerivative
from pylops.optimization.leastsquares import regularized_inversion
from pyproximal import L1, L2
from pyproximal.proximal.rMS import rMS
from pyproximal.optimization.primal import LinearizedADMM

np.random.seed(1)
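
###############################################################################
# Before running the denoising examples, here is a quick elementwise sketch of
# the rMS proximal operator from the formula above, implemented in plain NumPy
# for illustration (the solvers below use ``rMS`` from pyproximal instead; the
# values of ``alpha`` and ``kappa`` here are arbitrary choices)

alpha, kappa = 1., 0.5
pts = np.linspace(-3, 3, 7)
thresh = np.sqrt(kappa / alpha * (1 + 2 * alpha))
# quadratic (Tikhonov-like) branch below the threshold, identity branch above
prox_pts = np.where(np.abs(pts) <= thresh, pts / (1 + 2 * alpha), pts)
print('threshold:', thresh)
print('prox:', prox_pts)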
51+
###############################################################################
# We start with a simple piecewise-constant model with two jumps, which is
# well-suited for TV regularization

# Create noisy data
nx = 101
idx_jump1 = nx // 3
idx_jump2 = 3 * nx // 4
x = np.zeros(nx)
x[:idx_jump1] = 2
x[idx_jump1:idx_jump2] = 5
n = np.random.normal(0, 0.5, nx)
y = x + n

# Plot the model and the noisy data
fig, axs = plt.subplots(1, 1, figsize=(6, 5))
axs.plot(x, label='True model')
axs.plot(y, label='Noisy model')
axs.legend()
70+
###############################################################################
# For both rMS and TV we use the Linearized ADMM solver, while for Tikhonov
# regularization we use a regularized least-squares inversion (LSQR)

# Define functionals
l2 = L2(b=y)
l1 = L1(sigma=5.)
Dop = FirstDerivative(nx, edge=True, kind='backward')

# TV: the step size mu must satisfy mu <= tau / ||D||_2^2, where L estimates
# the largest eigenvalue of D^H D
L = np.real((Dop.H * Dop).eigs(neigs=1, which='LM')[0])
tau = 1.
mu = 0.99 * tau / L
xTV, _ = LinearizedADMM(l2, l1, Dop, tau=tau, mu=mu,
                        x0=np.zeros_like(x), niter=200)
85+
# rMS (sigma plays the role of alpha in the formulas above)
sigma = 1e5
kappa = 1e0
ms_relaxed = rMS(sigma=sigma, kappa=kappa)

# Solve
tau = 1.
mu = 1. / (tau * L)

xrMS, _ = LinearizedADMM(l2, ms_relaxed, Dop, tau=tau, mu=mu,
                         x0=np.zeros_like(x), niter=200)

# Tikhonov
Op = pylops.Identity(nx)
Regs = [Dop]
epsR = [6e0]

xTikhonov = regularized_inversion(Op=Op, Regs=Regs, y=y, epsRs=epsR)[0]
104+
# Plot the results
fig, axs = plt.subplots(1, 1, figsize=(6, 5))
axs.plot(x, label='True', linewidth=4, color='k')
axs.plot(y, '--', label='Noisy', linewidth=2, color='y')
axs.plot(xTV, label='TV')
axs.plot(xrMS, label='rMS')
axs.plot(xTikhonov, label='Tikhonov')
axs.legend()
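
###############################################################################
# To put a number on the visual comparison, we can also print the relative
# reconstruction error of each method (an illustrative addition; the metric
# choice is ours, not part of the referenced experiments)

for label, xinv in zip(['TV', 'rMS', 'Tikhonov'], [xTV, xrMS, xTikhonov]):
    # relative L2 error with respect to the true model
    err = np.linalg.norm(xinv - x) / np.linalg.norm(x)
    print(f'{label}: relative error = {err:.3f}')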
113+
###############################################################################
# Next, we consider an example where we replace the first jump with a slope.
# As we will see, TV cannot deal with this type of structure: the linear
# increase contributes heavily to the TV norm, so TV approximates the slope
# with a staircase. rMS, on the other hand, can reconstruct the model with
# high accuracy.

nx = 101
idx_jump1 = nx // 3
idx_jump2 = 3 * nx // 4
x = np.zeros(nx)
x[:idx_jump1] = 2
x[idx_jump1:idx_jump2] = np.linspace(2, 4, idx_jump2 - idx_jump1)
n = np.random.normal(0, 0.25, nx)
y = x + n

# Plot the model and the noisy data
fig, axs = plt.subplots(1, 1, figsize=(6, 5))
axs.plot(x, label='True model')
axs.plot(y, label='Noisy model')
axs.legend()
137+
###############################################################################

# Define functionals
l2 = L2(b=y)
l1 = L1(sigma=1.)
Dop = FirstDerivative(nx, edge=True, kind='backward')

# TV
L = np.real((Dop.H * Dop).eigs(neigs=1, which='LM')[0])
tau = 1.
mu = 0.99 * tau / L
xTV, _ = LinearizedADMM(l2, l1, Dop, tau=tau, mu=mu,
                        x0=np.zeros_like(x), niter=200)
151+
# rMS
sigma = 1e1
kappa = 1e0
ms_relaxed = rMS(sigma=sigma, kappa=kappa)

# Solve
tau = 1.
mu = 1. / (tau * L)

xrMS, _ = LinearizedADMM(l2, ms_relaxed, Dop, tau=tau, mu=mu,
                         x0=np.zeros_like(x), niter=200)

# Tikhonov
Op = pylops.Identity(nx)
Regs = [Dop]
epsR = [3e0]

xTikhonov = regularized_inversion(Op=Op, Regs=Regs, y=y, epsRs=epsR)[0]
170+
# Plot the results
fig, axs = plt.subplots(1, 1, figsize=(6, 5))
axs.plot(x, label='True', linewidth=4, color='k')
axs.plot(y, '--', label='Noisy', linewidth=2, color='y')
axs.plot(xTV, label='TV')
axs.plot(xrMS, label='rMS')
axs.plot(xTikhonov, label='Tikhonov')
axs.legend()
179+
###############################################################################
# Finally, we take a trace from a section of the Marmousi model. This trace is
# piecewise smooth with a few jumps, which makes it perfectly suited for rMS;
# TV, on the other hand, artificially introduces a staircase effect.

# Get a trace from the model and add some noise
m_trace = np.load('../testdata/marmousi_trace.npy')
nz = len(m_trace)
m_trace_noisy = m_trace + np.random.normal(0, 0.1, nz)

# Trace of the Marmousi model
fig, ax = plt.subplots(1, 1, figsize=(6, 5))
ax.plot(m_trace, linewidth=2, label='True')
ax.plot(m_trace_noisy, label='Noisy')
ax.set_title('Trace and noisy trace')
ax.axis('tight')
ax.legend()
fig.tight_layout()
197+
###############################################################################

# Define functionals
l2 = L2(b=m_trace_noisy)
l1 = L1(sigma=5e-1)
Dop = FirstDerivative(nz, edge=True, kind='backward')

# TV
L = np.real((Dop.H * Dop).eigs(neigs=1, which='LM')[0])
tau = 1.
mu = 0.99 * tau / L
xTV, _ = LinearizedADMM(l2, l1, Dop, tau=tau, mu=mu,
                        x0=np.zeros_like(m_trace), niter=200)

# rMS
sigma = 5e0
kappa = 1e-1
ms_relaxed = rMS(sigma=sigma, kappa=kappa)

# Solve
tau = 1.
mu = 1. / (tau * L)

xrMS, _ = LinearizedADMM(l2, ms_relaxed, Dop, tau=tau, mu=mu,
                         x0=np.zeros_like(m_trace), niter=200)

# Tikhonov
Op = pylops.Identity(nz)
Regs = [Dop]
epsR = [3e0]

xTikhonov = regularized_inversion(Op=Op, Regs=Regs, y=m_trace_noisy, epsRs=epsR)[0]
230+
# Plot the results
fig, axs = plt.subplots(1, 1, figsize=(6, 5))
axs.plot(m_trace, label='True', linewidth=4, color='k')
axs.plot(m_trace_noisy, '--', label='Noisy', linewidth=2, color='y')
axs.plot(xTV, label='TV')
axs.plot(xrMS, label='rMS')
axs.plot(xTikhonov, label='Tikhonov')
axs.legend()