xref: /petsc/src/tao/constrained/impls/admm/admm.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 #include <../src/tao/constrained/impls/admm/admm.h> /*I "petsctao.h" I*/
2 #include <petsctao.h>
3 #include <petsc/private/petscimpl.h>
4 
/* Updates terminating criteria
 *
 * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
 *
 * 2. Updates dual residual, d_k
 *
 * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty||   */
12 
/* Citation for the adaptive spectral penalty scheme; registered via
 * PetscCitationsRegister() in TaoSolve_ADMM (the flag ensures it is recorded once) */
static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@misc{xu2017adaptive,\n"
                               "   title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
                               "   author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
                               "   year={2017},\n"
                               "   eprint={1704.02712},\n"
                               "   archivePrefix={arXiv},\n"
                               "   primaryClass={cs.CV}\n"
                               "}  \n";

/* PetscEnum-style string tables: the enum value names, then the enum type name,
 * the common prefix stripped from option values, and a NULL terminator */
const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
const char *const TaoADMMUpdateTypes[]      = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
const char *const TaoALMMTypes[]            = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};
26 
static PetscErrorCode TaoADMMToleranceUpdate(Tao tao) {
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscReal Axnorm, Bznorm, ATynorm, temp;
  Vec       tempJR, tempL;
  Tao       mis;

  PetscFunctionBegin;
  mis    = am->subsolverX; /* misfit subsolver; its equality Jacobian is A */
  tempJR = am->workJacobianRight;
  tempL  = am->workLeft;
  /* ATy */
  PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
  PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
  PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
  /* dualres = mu * ||AT(Bz-Bzold)||_2 */
  PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
  PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
  PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
  am->dualres *= am->mu;

  /* ||Ax||_2, ||Bz||_2 */
  PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
  PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));

  /* Set catol to be catol_admm *  max{||Ax||,||Bz||,||c||} *
   * Set gatol to be gatol_admm *  ||A^Ty|| *
   * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
  /* const_norm == 0 indicates no constraint vector c was set (see TaoSolve_ADMM),
   * in which case ||c|| is dropped from the max */
  temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
  PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT));
  PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscFunctionReturn(0);
}
59 
/* Penalty Update for Adaptive ADMM.
 * Implements the spectral penalty (mu) selection of Xu et al., "Adaptive
 * Relaxed ADMM" (see citation string above): steepest-descent (SD) and
 * minimum-gradient (MG) stepsize estimates are formed for the A side (from
 * the Ax and yhat differences) and the B side (from the Bz and y differences)
 * and combined into a new mu. An estimate is used only when the corresponding
 * correlation exceeds the orthval safeguard; mu never increases and is
 * clamped below by mumin. */
static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao) {
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
  PetscBool hflag, gflag;
  Vec       tempJR, tempJR2;

  PetscFunctionBegin;
  tempJR  = am->workJacobianRight;
  tempJR2 = am->workJacobianRight2;
  hflag   = PETSC_FALSE;
  gflag   = PETSC_FALSE;
  a_k     = -1;
  b_k     = -1;

  /* A-side differences: Ax - Axold, yhat - yhatold, and their inner product */
  PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
  PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
  PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
  PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
  PetscCall(VecDot(tempJR, tempJR2, &Axyhat));

  /* B-side differences: Bz - Bz0, y0 - y, and their inner product */
  PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
  PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
  PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
  PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
  PetscCall(VecDot(tempJR, tempJR2, &Bzy));

  /* Trust the A-side curvature estimate only if the correlation passes the safeguard */
  if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
    hflag = PETSC_TRUE;
    a_sd  = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
    a_mg  = Axyhat / PetscSqr(Axdiff_norm);   /* alphaMG */
    a_k   = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg; /* hybrid SD/MG stepsize rule */
  }
  /* Same safeguard for the B-side estimate */
  if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
    gflag = PETSC_TRUE;
    b_sd  = PetscSqr(ydiff_norm) / Bzy;  /* betaSD */
    b_mg  = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
    b_k   = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
  }
  am->muold = am->mu;
  /* Geometric mean when both sides are valid, otherwise whichever is available */
  if (gflag && hflag) {
    am->mu = PetscSqrtReal(a_k * b_k);
  } else if (hflag) {
    am->mu = a_k;
  } else if (gflag) {
    am->mu = b_k;
  }
  /* mu is monotone non-increasing and bounded below by mumin */
  if (am->mu > am->muold) { am->mu = am->muold; }
  if (am->mu < am->mumin) { am->mu = am->mumin; }
  PetscFunctionReturn(0);
}
111 
112 static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type) {
113   TAO_ADMM *am = (TAO_ADMM *)tao->data;
114 
115   PetscFunctionBegin;
116   am->regswitch = type;
117   PetscFunctionReturn(0);
118 }
119 
120 static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type) {
121   TAO_ADMM *am = (TAO_ADMM *)tao->data;
122 
123   PetscFunctionBegin;
124   *type = am->regswitch;
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type) {
129   TAO_ADMM *am = (TAO_ADMM *)tao->data;
130 
131   PetscFunctionBegin;
132   am->update = type;
133   PetscFunctionReturn(0);
134 }
135 
136 static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type) {
137   TAO_ADMM *am = (TAO_ADMM *)tao->data;
138 
139   PetscFunctionBegin;
140   *type = am->update;
141   PetscFunctionReturn(0);
142 }
143 
/* This routine updates Jacobians with new x,z vectors,
 * and then updates Ax and Bz vectors, then computes updated residual vector
 * residual = Ax + Bz - c (the -c term is skipped when no constraint vector is set) */
static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual) {
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  Tao       mis, reg;

  PetscFunctionBegin;
  mis = am->subsolverX; /* provides A, the misfit constraint Jacobian */
  reg = am->subsolverZ; /* provides B, the regularizer constraint Jacobian */
  PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
  PetscCall(MatMult(mis->jacobian_equality, x, Ax));
  PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
  PetscCall(MatMult(reg->jacobian_equality, z, Bz));

  PetscCall(VecWAXPY(residual, 1., Bz, Ax));
  if (am->constraint != NULL) { PetscCall(VecAXPY(residual, -1., am->constraint)); }
  PetscFunctionReturn(0);
}
162 
/* Updates Augmented Lagrangians to given routines *
 * For subsolverX, routine needs to be ComputeObjectiveAndGradient
 * Separate Objective and Gradient routines are not supported.
 *
 * Wraps the user's misfit objective/gradient: on top of f(x) it adds the
 * augmented Lagrangian terms y^T(Ax+Bz-c) + (mu/2)||Ax+Bz-c||_2^2 and the
 * matching gradient terms mu*A^T(Ax+Bz-c) + A^T y.
 * Note: `tao` here is the misfit subsolver (subsolverX); `ptr` is the parent
 * ADMM Tao (installed in TaoSetUp_ADMM). */
static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr) {
  Tao       parent = (Tao)ptr;
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;
  PetscReal temp, temp2;
  Vec       tempJR;

  PetscFunctionBegin;
  tempJR = am->workJacobianRight;
  /* Refresh Ax, Bz, and residual = Ax + Bz - c at the current x */
  PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
  PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));

  am->last_misfit_val = *f; /* cached for the monitor output in TaoSolve_ADMM */
  /* Objective  Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
  PetscCall(VecTDot(am->residual, am->y, &temp));
  PetscCall(VecTDot(am->residual, am->residual, &temp2));
  *f += temp + (am->mu / 2) * temp2;

  /* Gradient. Add + mu*AT(Ax+Bz-c) + AT*y */
  PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
  PetscCall(VecAXPY(g, am->mu, tempJR));
  PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
  PetscCall(VecAXPY(g, 1., tempJR));
  PetscFunctionReturn(0);
}
190 
/* Updates Augmented Lagrangians to given routines
 * For subsolverZ, routine needs to be ComputeObjectiveAndGradient
 * Separate Objective and Gradient routines are not supported.
 *
 * Wraps the user's regularizer objective/gradient: on top of g(z) it adds the
 * augmented Lagrangian terms y^T(Ax+Bz-c) + (mu/2)||Ax+Bz-c||_2^2 and the
 * matching gradient terms mu*B^T(Ax+Bz-c) + B^T y.
 * Note: `tao` here is the regularizer subsolver (subsolverZ); `ptr` is the
 * parent ADMM Tao (installed in TaoSolve_ADMM). */
static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr) {
  Tao       parent = (Tao)ptr;
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;
  PetscReal temp, temp2;
  Vec       tempJR;

  PetscFunctionBegin;
  tempJR = am->workJacobianRight;
  /* Refresh Ax, Bz, and residual = Ax + Bz - c at the current z */
  PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
  PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
  am->last_reg_val = *f; /* cached for the monitor output in TaoSolve_ADMM */
  /* Objective  Add  + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
  PetscCall(VecTDot(am->residual, am->y, &temp));
  PetscCall(VecTDot(am->residual, am->residual, &temp2));
  *f += temp + (am->mu / 2) * temp2;

  /* Gradient. Add + mu*BT(Ax+Bz-c) + BT*y */
  PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
  PetscCall(VecAXPY(g, am->mu, tempJR));
  PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
  PetscCall(VecAXPY(g, 1., tempJR));
  PetscFunctionReturn(0);
}
217 
218 /* Computes epsilon padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps */
219 static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm) {
220   TAO_ADMM *am = (TAO_ADMM *)tao->data;
221   PetscInt  N;
222 
223   PetscFunctionBegin;
224   PetscCall(VecGetSize(am->workLeft, &N));
225   PetscCall(VecPointwiseMult(am->workLeft, x, x));
226   PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
227   PetscCall(VecSqrtAbs(am->workLeft));
228   PetscCall(VecSum(am->workLeft, norm));
229   *norm += N * am->l1epsilon;
230   *norm *= am->lambda;
231   PetscFunctionReturn(0);
232 }
233 
/* Adds the change in the augmented-Lagrangian curvature to a subsolver Hessian
 * after the spectral penalty mu has changed:
 *   H += (mu - muold) * Constraint      (Constraint is ATA or BTB)
 * or, when the constraint Jacobian is an implicit identity (Identity true),
 *   H += (mu - muold) * I.
 * No-op for TAO_ADMM_UPDATE_BASIC, where mu*ATA / mu*BTB is folded in once
 * up front in TaoSolve_ADMM. `ptr` is the TAO_ADMM context. */
static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr) {
  TAO_ADMM *am = (TAO_ADMM *)ptr;

  PetscFunctionBegin;
  switch (am->update) {
  case (TAO_ADMM_UPDATE_BASIC): break;
  case (TAO_ADMM_UPDATE_ADAPTIVE):
  case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
    /* Only apply the delta when mu actually changed since the last update */
    if (H && (am->muold != am->mu)) {
      if (!Identity) {
        PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
      } else {
        PetscCall(MatShift(H, am->mu - am->muold));
      }
    }
    break;
  }
  PetscFunctionReturn(0);
}
253 
/* Updates Hessian - adds second derivative of augmented Lagrangian
 * H \gets H + \rho*ATA
 * Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
 * For ADAPTIVE, ADAPTIVE_RELAXED,
 * H \gets H + (\rho-\rhoold)*ATA
 * Here, we assume that A is linear constraint i.e., doesnt change.
 * Thus, for both ADAPTIVE, and RELAXED, ATA matrix is pre-set (except for A=I (null case)) see TaoSetUp_ADMM */
static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr) {
  Tao       parent = (Tao)ptr;
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;

  PetscFunctionBegin;
  if (am->Hxchange) {
    /* Case where Hessian gets updated with respect to x vector input:
     * re-evaluate the user Hessian, then add the penalty curvature. */
    PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
    PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
  } else if (am->Hxbool) {
    /* Hessian doesn't get updated. H(x) = c */
    /* Update Lagrangian curvature only once per outer TaoSolve iteration;
     * Hxbool is re-armed each iteration in TaoSolve_ADMM. */
    PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
    am->Hxbool = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
278 
/* Same as SubHessianUpdate, except for B matrix instead of A matrix */
static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr) {
  Tao       parent = (Tao)ptr;
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;

  PetscFunctionBegin;

  if (am->Hzchange) {
    /* Case where Hessian gets updated with respect to the z vector input:
     * re-evaluate the user Hessian, then add the penalty curvature. */
    PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
    PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
  } else if (am->Hzbool) {
    /* Hessian doesn't get updated. H(z) = c */
    /* Update Lagrangian curvature only once per outer TaoSolve iteration;
     * Hzbool is re-armed each iteration in TaoSolve_ADMM. */
    PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
    am->Hzbool = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
298 
/* Shell Matrix routine for A matrix.
 * This gets used when user puts NULL for
 * TaoSetJacobianEqualityRoutine(tao, NULL,NULL, ...)
 * Essentially sets A=I.
 * Registered for both MATOP_MULT and MATOP_MULT_TRANSPOSE in TaoSetUp_ADMM
 * (the identity is symmetric, so one routine serves both). */
static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out) {
  PetscFunctionBegin;
  PetscCall(VecCopy(in, out));
  PetscFunctionReturn(0);
}
308 
/* Shell Matrix routine for B matrix.
 * This gets used when user puts NULL for
 * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
 * Sets B=-I.
 * Registered for both MATOP_MULT and MATOP_MULT_TRANSPOSE in TaoSetUp_ADMM
 * (-I is symmetric, so one routine serves both). */
static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out) {
  PetscFunctionBegin;
  PetscCall(VecCopy(in, out));
  PetscCall(VecScale(out, -1.));
  PetscFunctionReturn(0);
}
319 
320 /* Solve f(x) + g(z) s.t. Ax + Bz = c */
321 static PetscErrorCode TaoSolve_ADMM(Tao tao) {
322   TAO_ADMM *am = (TAO_ADMM *)tao->data;
323   PetscInt  N;
324   PetscReal reg_func;
325   PetscBool is_reg_shell;
326   Vec       tempL;
327 
328   PetscFunctionBegin;
329   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
330     PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
331     PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
332     if (am->constraint != NULL) { PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm)); }
333   }
334   tempL = am->workLeft;
335   PetscCall(VecGetSize(tempL, &N));
336 
337   if (am->Hx && am->ops->misfithess) { PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao)); }
338 
339   if (!am->zJI) {
340     /* Currently, B is assumed to be a linear system, i.e., not getting updated*/
341     PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->BTB)));
342   }
343   if (!am->xJI) {
344     /* Currently, A is assumed to be a linear system, i.e., not getting updated*/
345     PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->ATA)));
346   }
347 
348   is_reg_shell = PETSC_FALSE;
349 
350   PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));
351 
352   if (!is_reg_shell) {
353     switch (am->regswitch) {
354     case (TAO_ADMM_REGULARIZER_USER): break;
355     case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
356       /* Soft Threshold. */
357       break;
358     }
359     if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
360     if (am->Hz && am->ops->reghess) { PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao)); }
361   }
362 
363   switch (am->update) {
364   case TAO_ADMM_UPDATE_BASIC:
365     if (am->subsolverX->hessian) {
366       /* In basic case, Hessian does not get updated w.r.t. to spectral penalty
367        * Here, when A is set, i.e., am->xJI, add mu*ATA to Hessian*/
368       if (!am->xJI) {
369         PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
370       } else {
371         PetscCall(MatShift(am->subsolverX->hessian, am->mu));
372       }
373     }
374     if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
375       if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
376         PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
377       } else {
378         PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
379       }
380     }
381     break;
382   case TAO_ADMM_UPDATE_ADAPTIVE:
383   case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED: break;
384   }
385 
386   PetscCall(PetscCitationsRegister(citation, &cited));
387   tao->reason = TAO_CONTINUE_ITERATING;
388 
389   while (tao->reason == TAO_CONTINUE_ITERATING) {
390     PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
391     PetscCall(VecCopy(am->Bz, am->Bzold));
392 
393     /* x update */
394     PetscCall(TaoSolve(am->subsolverX));
395     PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
396     PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));
397 
398     am->Hxbool = PETSC_TRUE;
399 
400     /* z update */
401     switch (am->regswitch) {
402     case TAO_ADMM_REGULARIZER_USER: PetscCall(TaoSolve(am->subsolverZ)); break;
403     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
404       /* L1 assumes A,B jacobians are identity nxn matrix */
405       PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
406       PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
407       break;
408     }
409     am->Hzbool = PETSC_TRUE;
410     /* Returns Ax + Bz - c with updated Ax,Bz vectors */
411     PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
412     /* Dual variable, y += y + mu*(Ax+Bz-c) */
413     PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));
414 
415     /* stopping tolerance update */
416     PetscCall(TaoADMMToleranceUpdate(tao));
417 
418     /* Updating Spectral Penalty */
419     switch (am->update) {
420     case TAO_ADMM_UPDATE_BASIC: am->muold = am->mu; break;
421     case TAO_ADMM_UPDATE_ADAPTIVE:
422     case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
423       if (tao->niter == 0) {
424         PetscCall(VecCopy(am->y, am->y0));
425         PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
426         if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
427         PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
428         PetscCall(VecCopy(am->Ax, am->Axold));
429         PetscCall(VecCopy(am->Bz, am->Bz0));
430         am->muold = am->mu;
431       } else if (tao->niter % am->T == 1) {
432         /* we have compute Bzold in a previous iteration, and we computed Ax above */
433         PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
434         if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
435         PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
436         PetscCall(AdaptiveADMMPenaltyUpdate(tao));
437         PetscCall(VecCopy(am->Ax, am->Axold));
438         PetscCall(VecCopy(am->Bz, am->Bz0));
439         PetscCall(VecCopy(am->yhat, am->yhatold));
440         PetscCall(VecCopy(am->y, am->y0));
441       } else {
442         am->muold = am->mu;
443       }
444       break;
445     default: break;
446     }
447     tao->niter++;
448 
449     /* Calculate original function values. misfit part was done in TaoADMMToleranceUpdate*/
450     switch (am->regswitch) {
451     case TAO_ADMM_REGULARIZER_USER:
452       if (is_reg_shell) {
453         PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
454       } else {
455         (*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP);
456       }
457       break;
458     case TAO_ADMM_REGULARIZER_SOFT_THRESH: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func)); break;
459     }
460     PetscCall(VecCopy(am->y, am->yold));
461     PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
462     PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
463     PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));
464 
465     PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
466     PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
467   }
468   /* Update vectors */
469   PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
470   PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
471   PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
472   PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
473   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
474   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
475   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
476   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
477   PetscFunctionReturn(0);
478 }
479 
480 static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject) {
481   TAO_ADMM *am = (TAO_ADMM *)tao->data;
482 
483   PetscFunctionBegin;
484   PetscOptionsHeadBegin(PetscOptionsObject, "ADMM problem that solves f(x) in a form of f(x) + g(z) subject to x - z = 0. Norm 1 and 2 are supported. Different subsolver routines can be selected. ");
485   PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
486   PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
487   PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
488   PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
489   PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
490   PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
491   PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
492   PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
493   PetscOptionsHeadEnd();
494   PetscCall(TaoSetFromOptions(am->subsolverX));
495   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) { PetscCall(TaoSetFromOptions(am->subsolverZ)); }
496   PetscFunctionReturn(0);
497 }
498 
499 static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer) {
500   TAO_ADMM *am = (TAO_ADMM *)tao->data;
501 
502   PetscFunctionBegin;
503   PetscCall(PetscViewerASCIIPushTab(viewer));
504   PetscCall(TaoView(am->subsolverX, viewer));
505   PetscCall(TaoView(am->subsolverZ, viewer));
506   PetscCall(PetscViewerASCIIPopTab(viewer));
507   PetscFunctionReturn(0);
508 }
509 
/* Creates the identity-shell Jacobians when none were supplied, allocates all
 * work/state vectors, wires the augmented-Lagrangian callbacks into the two
 * subsolvers, and validates the optional constraint vector. */
static PetscErrorCode TaoSetUp_ADMM(Tao tao) {
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscInt  n, N, M;

  PetscFunctionBegin;
  PetscCall(VecGetLocalSize(tao->solution, &n));
  PetscCall(VecGetSize(tao->solution, &N));
  /* If Jacobian is given as NULL, it means Jacobian is identity matrix with size of solution vector */
  if (!am->JB) {
    am->zJI = PETSC_TRUE; /* flags B as the implicit -I shell */
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
    PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB));
    PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB));
    am->JBpre = am->JB; /* preconditioner matrix aliases the shell */
  }
  if (!am->JA) {
    am->xJI = PETSC_TRUE; /* flags A as the implicit I shell */
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
    PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity));
    PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity));
    am->JApre = am->JA;
  }
  PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
  if (!tao->gradient) { PetscCall(VecDuplicate(tao->solution, &tao->gradient)); }
  /* subsolverX shares the outer solution vector; z gets its own zeroed vector */
  PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
  if (!am->z) {
    PetscCall(VecDuplicate(tao->solution, &am->z));
    PetscCall(VecSet(am->z, 0.0));
  }
  PetscCall(TaoSetSolution(am->subsolverZ, am->z));
  /* Work vectors and ADMM state (duals and their snapshots) are duplicated
   * from Ax, i.e., they live in the output space of A */
  if (!am->workLeft) { PetscCall(VecDuplicate(tao->solution, &am->workLeft)); }
  if (!am->Axold) { PetscCall(VecDuplicate(am->Ax, &am->Axold)); }
  if (!am->workJacobianRight) { PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight)); }
  if (!am->workJacobianRight2) { PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2)); }
  if (!am->Bz) { PetscCall(VecDuplicate(am->Ax, &am->Bz)); }
  if (!am->Bzold) { PetscCall(VecDuplicate(am->Ax, &am->Bzold)); }
  if (!am->Bz0) { PetscCall(VecDuplicate(am->Ax, &am->Bz0)); }
  if (!am->y) {
    PetscCall(VecDuplicate(am->Ax, &am->y));
    PetscCall(VecSet(am->y, 0.0));
  }
  if (!am->yold) {
    PetscCall(VecDuplicate(am->Ax, &am->yold));
    PetscCall(VecSet(am->yold, 0.0));
  }
  if (!am->y0) {
    PetscCall(VecDuplicate(am->Ax, &am->y0));
    PetscCall(VecSet(am->y0, 0.0));
  }
  if (!am->yhat) {
    PetscCall(VecDuplicate(am->Ax, &am->yhat));
    PetscCall(VecSet(am->yhat, 0.0));
  }
  if (!am->yhatold) {
    PetscCall(VecDuplicate(am->Ax, &am->yhatold));
    PetscCall(VecSet(am->yhatold, 0.0));
  }
  if (!am->residual) {
    PetscCall(VecDuplicate(am->Ax, &am->residual));
    PetscCall(VecSet(am->residual, 0.0));
  }
  if (!am->constraint) {
    am->constraint = NULL;
  } else {
    PetscCall(VecGetSize(am->constraint, &M));
    PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
  }

  /* Save changed tao tolerance for adaptive tolerance */
  if (tao->gatol_changed) { am->gatol_admm = tao->gatol; }
  if (tao->catol_changed) { am->catol_admm = tao->catol; }

  /*Update spectral and dual elements to X subsolver */
  PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
  PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
  PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
  PetscFunctionReturn(0);
}
588 
/* Releases all vectors, matrices, and subsolvers owned by the ADMM context,
 * then clears the composed query functions and frees the context itself. */
static PetscErrorCode TaoDestroy_ADMM(Tao tao) {
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  /* Work/state vectors created in TaoSetUp_ADMM (VecDestroy is a no-op on NULL) */
  PetscCall(VecDestroy(&am->z));
  PetscCall(VecDestroy(&am->Ax));
  PetscCall(VecDestroy(&am->Axold));
  PetscCall(VecDestroy(&am->Bz));
  PetscCall(VecDestroy(&am->Bzold));
  PetscCall(VecDestroy(&am->Bz0));
  PetscCall(VecDestroy(&am->residual));
  PetscCall(VecDestroy(&am->y));
  PetscCall(VecDestroy(&am->yold));
  PetscCall(VecDestroy(&am->y0));
  PetscCall(VecDestroy(&am->yhat));
  PetscCall(VecDestroy(&am->yhatold));
  PetscCall(VecDestroy(&am->workLeft));
  PetscCall(VecDestroy(&am->workJacobianRight));
  PetscCall(VecDestroy(&am->workJacobianRight2));

  PetscCall(MatDestroy(&am->JA));
  PetscCall(MatDestroy(&am->JB));
  /* In the identity-shell case (xJI/zJI set) JApre/JBpre alias JA/JB
   * (see TaoSetUp_ADMM), so they are not destroyed separately */
  if (!am->xJI) { PetscCall(MatDestroy(&am->JApre)); }
  if (!am->zJI) { PetscCall(MatDestroy(&am->JBpre)); }
  if (am->Hx) {
    PetscCall(MatDestroy(&am->Hx));
    PetscCall(MatDestroy(&am->Hxpre));
  }
  if (am->Hz) {
    PetscCall(MatDestroy(&am->Hz));
    PetscCall(MatDestroy(&am->Hzpre));
  }
  /* ATA/BTB are formed in TaoSolve_ADMM when the Jacobians are explicit */
  PetscCall(MatDestroy(&am->ATA));
  PetscCall(MatDestroy(&am->BTB));
  PetscCall(TaoDestroy(&am->subsolverX));
  PetscCall(TaoDestroy(&am->subsolverZ));
  am->parent = NULL; /* back-pointer only, not an owned reference */
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(0);
}
633 
634 /*MC
635 
  TAOADMM - Alternating direction method of multipliers method for solving linear problems with
            constraints, of the form min_x f(x) + g(z)  s.t. Ax+Bz=c.
            This algorithm employs two sub Tao solvers, whose types can be specified
            by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a HessianRoutine, for both subsolvers.
            Hessians can be given a boolean flag determining whether they change with respect to an input vector. This can be set via
            TaoADMMSet{Misfit,Regularizer}HessianChangeStatus.
            The second subsolver does support TAOSHELL. It should be noted that the L1-norm is used for the objective value for the TAOSHELL type.
            There is an option to set the regularizer type, and currently soft-threshold is implemented. For the spectral penalty update,
            there are currently a basic option and an adaptive option.
            The constraint is set as Ax+Bz=c, and A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian.
            c can be set with TaoADMMSetConstraintVectorRHS.
            The user can also provide a regularizer weight for the second subsolver.
648 
649   References:
650 . * - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
651           "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
652           The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.
653 
654   Options Database Keys:
655 + -tao_admm_regularizer_coefficient        - regularizer constant (default 1.e-6)
656 . -tao_admm_spectral_penalty               - Constant for Augmented Lagrangian term (default 1.)
657 . -tao_admm_relaxation_parameter           - relaxation parameter for Z update (default 1.)
658 . -tao_admm_tolerance_update_factor        - ADMM dynamic tolerance update factor (default 1.e-12)
659 . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
660 . -tao_admm_minimum_spectral_penalty       - Set ADMM minimum spectral penalty (default 0)
661 . -tao_admm_dual_update                    - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
662 - -tao_admm_regularizer_type               - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
663 
664   Level: beginner
665 
666 .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
667           `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
668           `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`,
669           `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
670           `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
671           `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
672           `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
673           `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
674 M*/
675 
/* Constructor for the TAOADMM solver type: allocates the TAO_ADMM context,
   installs the type-specific operations, sets default parameter values, creates
   the two subsolvers (misfit and regularizer), and registers the query-able
   set/get methods. Called by TaoSetType()/TaoCreate(). */
PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao) {
  TAO_ADMM *am;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(tao, &am));

  /* Install the ADMM implementations of the generic Tao operations. */
  tao->ops->destroy        = TaoDestroy_ADMM;
  tao->ops->setup          = TaoSetUp_ADMM;
  tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
  tao->ops->view           = TaoView_ADMM;
  tao->ops->solve          = TaoSolve_ADMM;

  /* Default parameter values; most are overridable via the options database
     (see the manual page above) or the TaoADMMSet*() API. */
  tao->data           = (void *)am;
  am->l1epsilon       = 1e-6; /* regularizer coefficient epsilon for the L1 term */
  am->lambda          = 1e-4; /* L1 regularizer weight */
  am->mu              = 1.;   /* spectral penalty (augmented Lagrangian constant) */
  am->muold           = 0.;
  am->mueps           = PETSC_MACHINE_EPSILON;
  am->mumin           = 0.;  /* lower bound enforced on mu during adaptive updates */
  am->orthval         = 0.2; /* spectral penalty update curvature safeguard */
  am->T               = 2;
  am->parent          = tao;
  am->update          = TAO_ADMM_UPDATE_BASIC;
  am->regswitch       = TAO_ADMM_REGULARIZER_SOFT_THRESH;
  am->tol             = PETSC_SMALL;
  am->const_norm      = 0;
  am->resnorm         = 0;
  am->dualres         = 0;
  am->ops->regobjgrad = NULL;
  am->ops->reghess    = NULL;
  am->gamma           = 1;
  am->regobjgradP     = NULL;
  am->reghessP        = NULL;
  am->gatol_admm      = 1e-8;
  am->catol_admm      = 0;
  /* By default assume both Hessians vary with the input vector; users can
     disable recomputation via TaoADMMSet{Misfit,Reg}HessianChangeStatus(). */
  am->Hxchange        = PETSC_TRUE;
  am->Hzchange        = PETSC_TRUE;
  am->Hzbool          = PETSC_TRUE;
  am->Hxbool          = PETSC_TRUE;

  /* Create the two inner subsolvers on the same communicator; option prefixes
     "misfit_" and "reg_" let each be configured independently. The prefix must
     be set (and the tab level bumped for nested viewing) before use. */
  PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
  PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
  PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
  PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));

  PetscCall(TaoSetType(am->subsolverX, TAONLS));
  PetscCall(TaoSetType(am->subsolverZ, TAONLS));
  /* Compose the parent on each subsolver so TaoGetADMMParentTao() can recover
     it from inside subsolver callbacks. */
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
  /* Register the type-specific methods dispatched by PetscTryMethod/PetscUseMethod. */
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
  PetscFunctionReturn(0);
}
733 
734 /*@
735   TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines  whether Hessian matrix of misfit subsolver changes with respect to input vector.
736 
737   Collective on Tao
738 
739   Input Parameters:
740 +  tao - the Tao solver context.
741 -  b - the Hessian matrix change status boolean, PETSC_FALSE  when the Hessian matrix does not change, TRUE otherwise.
742 
743   Level: advanced
744 
745 .seealso: `TAOADMM`
746 
747 @*/
748 PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b) {
749   TAO_ADMM *am = (TAO_ADMM *)tao->data;
750 
751   PetscFunctionBegin;
752   am->Hxchange = b;
753   PetscFunctionReturn(0);
754 }
755 
756 /*@
757   TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.
758 
759   Collective on Tao
760 
761   Input Parameters:
762 +  tao - the Tao solver context
763 -  b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, TRUE otherwise.
764 
765   Level: advanced
766 
767 .seealso: `TAOADMM`
768 
769 @*/
770 PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b) {
771   TAO_ADMM *am = (TAO_ADMM *)tao->data;
772 
773   PetscFunctionBegin;
774   am->Hzchange = b;
775   PetscFunctionReturn(0);
776 }
777 
778 /*@
779   TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
780 
781   Collective on Tao
782 
783   Input Parameters:
784 +  tao - the Tao solver context
785 -  mu - spectral penalty
786 
787   Level: advanced
788 
789 .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
790 @*/
791 PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu) {
792   TAO_ADMM *am = (TAO_ADMM *)tao->data;
793 
794   PetscFunctionBegin;
795   am->mu = mu;
796   PetscFunctionReturn(0);
797 }
798 
799 /*@
800   TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
801 
802   Collective on Tao
803 
804   Input Parameter:
805 .  tao - the Tao solver context
806 
807   Output Parameter:
808 .  mu - spectral penalty
809 
810   Level: advanced
811 
812 .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
813 @*/
814 PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu) {
815   TAO_ADMM *am = (TAO_ADMM *)tao->data;
816 
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
819   PetscValidRealPointer(mu, 2);
820   *mu = am->mu;
821   PetscFunctionReturn(0);
822 }
823 
824 /*@
825   TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside ADMM
826 
827   Collective on Tao
828 
829   Input Parameter:
830 .  tao - the Tao solver context
831 
832    Output Parameter:
833 .  misfit - the Tao subsolver context
834 
835   Level: advanced
836 
837 .seealso: `TAOADMM`
838 
839 @*/
840 PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit) {
841   TAO_ADMM *am = (TAO_ADMM *)tao->data;
842 
843   PetscFunctionBegin;
844   *misfit = am->subsolverX;
845   PetscFunctionReturn(0);
846 }
847 
848 /*@
849   TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside ADMM
850 
851   Collective on Tao
852 
853   Input Parameter:
854 .  tao - the Tao solver context
855 
856   Output Parameter:
857 .  reg - the Tao subsolver context
858 
859   Level: advanced
860 
861 .seealso: `TAOADMM`
862 
863 @*/
864 PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg) {
865   TAO_ADMM *am = (TAO_ADMM *)tao->data;
866 
867   PetscFunctionBegin;
868   *reg = am->subsolverZ;
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873   TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for ADMM
874 
875   Collective on Tao
876 
877   Input Parameters:
878 + tao - the Tao solver context
879 - c - RHS vector
880 
881   Level: advanced
882 
883 .seealso: `TAOADMM`
884 
885 @*/
886 PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c) {
887   TAO_ADMM *am = (TAO_ADMM *)tao->data;
888 
889   PetscFunctionBegin;
890   am->constraint = c;
891   PetscFunctionReturn(0);
892 }
893 
894 /*@
895   TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
896 
897   Collective on Tao
898 
899   Input Parameters:
900 +  tao - the Tao solver context
901 -  mu  - minimum spectral penalty value
902 
903   Level: advanced
904 
905 .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
906 @*/
907 PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu) {
908   TAO_ADMM *am = (TAO_ADMM *)tao->data;
909 
910   PetscFunctionBegin;
911   am->mumin = mu;
912   PetscFunctionReturn(0);
913 }
914 
915 /*@
916   TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case
917 
918   Collective on Tao
919 
920   Input Parameters:
921 +  tao - the Tao solver context
922 -  lambda - L1-norm regularizer coefficient
923 
924   Level: advanced
925 
926 .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
927 
928 @*/
929 PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda) {
930   TAO_ADMM *am = (TAO_ADMM *)tao->data;
931 
932   PetscFunctionBegin;
933   am->lambda = lambda;
934   PetscFunctionReturn(0);
935 }
936 
937 /*@C
938   TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix B for the ADMM algorithm. Matrix B constrains the z variable.
939 
940   Collective on Tao
941 
942   Input Parameters:
943 + tao - the Tao solver context
944 . J - user-created regularizer constraint Jacobian matrix
945 . Jpre - user-created regularizer Jacobian constraint preconditioner matrix
946 . func - function pointer for the regularizer constraint Jacobian update function
947 - ctx - user context for the regularizer Hessian
948 
949   Level: advanced
950 
951 .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
952 
953 @*/
954 PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx) {
955   TAO_ADMM *am = (TAO_ADMM *)tao->data;
956 
957   PetscFunctionBegin;
958   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
959   if (J) {
960     PetscValidHeaderSpecific(J, MAT_CLASSID, 2);
961     PetscCheckSameComm(tao, 1, J, 2);
962   }
963   if (Jpre) {
964     PetscValidHeaderSpecific(Jpre, MAT_CLASSID, 3);
965     PetscCheckSameComm(tao, 1, Jpre, 3);
966   }
967   if (ctx) am->misfitjacobianP = ctx;
968   if (func) am->ops->misfitjac = func;
969 
970   if (J) {
971     PetscCall(PetscObjectReference((PetscObject)J));
972     PetscCall(MatDestroy(&am->JA));
973     am->JA = J;
974   }
975   if (Jpre) {
976     PetscCall(PetscObjectReference((PetscObject)Jpre));
977     PetscCall(MatDestroy(&am->JApre));
978     am->JApre = Jpre;
979   }
980   PetscFunctionReturn(0);
981 }
982 
983 /*@C
984   TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for ADMM algorithm. Matrix B constraints z variable.
985 
986   Collective on Tao
987 
988   Input Parameters:
989 + tao - the Tao solver context
990 . J - user-created regularizer constraint Jacobian matrix
991 . Jpre - user-created regularizer Jacobian constraint preconditioner matrix
992 . func - function pointer for the regularizer constraint Jacobian update function
993 - ctx - user context for the regularizer Hessian
994 
995   Level: advanced
996 
997 .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
998 
999 @*/
1000 PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx) {
1001   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1002 
1003   PetscFunctionBegin;
1004   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1005   if (J) {
1006     PetscValidHeaderSpecific(J, MAT_CLASSID, 2);
1007     PetscCheckSameComm(tao, 1, J, 2);
1008   }
1009   if (Jpre) {
1010     PetscValidHeaderSpecific(Jpre, MAT_CLASSID, 3);
1011     PetscCheckSameComm(tao, 1, Jpre, 3);
1012   }
1013   if (ctx) am->regjacobianP = ctx;
1014   if (func) am->ops->regjac = func;
1015 
1016   if (J) {
1017     PetscCall(PetscObjectReference((PetscObject)J));
1018     PetscCall(MatDestroy(&am->JB));
1019     am->JB = J;
1020   }
1021   if (Jpre) {
1022     PetscCall(PetscObjectReference((PetscObject)Jpre));
1023     PetscCall(MatDestroy(&am->JBpre));
1024     am->JBpre = Jpre;
1025   }
1026   PetscFunctionReturn(0);
1027 }
1028 
1029 /*@C
1030    TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1031 
1032    Collective on tao
1033 
1034    Input Parameters:
1035 +    tao - the Tao context
1036 .    func - function pointer for the misfit value and gradient evaluation
1037 -    ctx - user context for the misfit
1038 
1039    Level: advanced
1040 
1041 .seealso: `TAOADMM`
1042 
1043 @*/
1044 PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx) {
1045   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1046 
1047   PetscFunctionBegin;
1048   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1049   am->misfitobjgradP     = ctx;
1050   am->ops->misfitobjgrad = func;
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 /*@C
1055    TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1056    function into the algorithm, to be used for subsolverX.
1057 
1058    Collective on tao
1059 
1060    Input Parameters:
1061 + tao - the Tao context
1062 . H - user-created matrix for the Hessian of the misfit term
1063 . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1064 . func - function pointer for the misfit Hessian evaluation
1065 - ctx - user context for the misfit Hessian
1066 
1067    Level: advanced
1068 
1069 .seealso: `TAOADMM`
1070 
1071 @*/
1072 PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx) {
1073   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1074 
1075   PetscFunctionBegin;
1076   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1077   if (H) {
1078     PetscValidHeaderSpecific(H, MAT_CLASSID, 2);
1079     PetscCheckSameComm(tao, 1, H, 2);
1080   }
1081   if (Hpre) {
1082     PetscValidHeaderSpecific(Hpre, MAT_CLASSID, 3);
1083     PetscCheckSameComm(tao, 1, Hpre, 3);
1084   }
1085   if (ctx) { am->misfithessP = ctx; }
1086   if (func) { am->ops->misfithess = func; }
1087   if (H) {
1088     PetscCall(PetscObjectReference((PetscObject)H));
1089     PetscCall(MatDestroy(&am->Hx));
1090     am->Hx = H;
1091   }
1092   if (Hpre) {
1093     PetscCall(PetscObjectReference((PetscObject)Hpre));
1094     PetscCall(MatDestroy(&am->Hxpre));
1095     am->Hxpre = Hpre;
1096   }
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 /*@C
1101    TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1102 
1103    Collective on tao
1104 
1105    Input Parameters:
1106 + tao - the Tao context
1107 . func - function pointer for the regularizer value and gradient evaluation
1108 - ctx - user context for the regularizer
1109 
1110    Level: advanced
1111 
1112 .seealso: `TAOADMM`
1113 
1114 @*/
1115 PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx) {
1116   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1117 
1118   PetscFunctionBegin;
1119   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1120   am->regobjgradP     = ctx;
1121   am->ops->regobjgrad = func;
1122   PetscFunctionReturn(0);
1123 }
1124 
1125 /*@C
1126    TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1127    function, to be used for subsolverZ.
1128 
1129    Collective on tao
1130 
1131    Input Parameters:
1132 + tao - the Tao context
1133 . H - user-created matrix for the Hessian of the regularization term
1134 . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1135 . func - function pointer for the regularizer Hessian evaluation
1136 - ctx - user context for the regularizer Hessian
1137 
1138    Level: advanced
1139 
1140 .seealso: `TAOADMM`
1141 
1142 @*/
1143 PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx) {
1144   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1145 
1146   PetscFunctionBegin;
1147   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1148   if (H) {
1149     PetscValidHeaderSpecific(H, MAT_CLASSID, 2);
1150     PetscCheckSameComm(tao, 1, H, 2);
1151   }
1152   if (Hpre) {
1153     PetscValidHeaderSpecific(Hpre, MAT_CLASSID, 3);
1154     PetscCheckSameComm(tao, 1, Hpre, 3);
1155   }
1156   if (ctx) { am->reghessP = ctx; }
1157   if (func) { am->ops->reghess = func; }
1158   if (H) {
1159     PetscCall(PetscObjectReference((PetscObject)H));
1160     PetscCall(MatDestroy(&am->Hz));
1161     am->Hz = H;
1162   }
1163   if (Hpre) {
1164     PetscCall(PetscObjectReference((PetscObject)Hpre));
1165     PetscCall(MatDestroy(&am->Hzpre));
1166     am->Hzpre = Hpre;
1167   }
1168   PetscFunctionReturn(0);
1169 }
1170 
1171 /*@
1172    TaoGetADMMParentTao - Gets pointer to parent ADMM tao, used by inner subsolver.
1173 
1174    Collective on tao
1175 
1176    Input Parameter:
1177 . tao - the Tao context
1178 
1179    Output Parameter:
1180 . admm_tao - the parent Tao context
1181 
1182    Level: advanced
1183 
1184 .seealso: `TAOADMM`
1185 
1186 @*/
1187 PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao) {
1188   PetscFunctionBegin;
1189   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1190   PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 /*@
1195   TaoADMMGetDualVector - Returns the dual vector associated with the current TAOADMM state
1196 
1197   Not Collective
1198 
1199   Input Parameter:
1200 . tao - the Tao context
1201 
1202   Output Parameter:
1203 . Y - the current solution
1204 
1205   Level: intermediate
1206 
1207 .seealso: `TAOADMM`
1208 
1209 @*/
1210 PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y) {
1211   TAO_ADMM *am = (TAO_ADMM *)tao->data;
1212 
1213   PetscFunctionBegin;
1214   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1215   *Y = am->y;
1216   PetscFunctionReturn(0);
1217 }
1218 
/*@
  TaoADMMSetRegularizerType - Set regularizer type for ADMM routine

  Not Collective

  Input Parameters:
+ tao  - the Tao context
- type - regularizer type

  Options Database:
.  -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer

  Level: intermediate

.seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(tao, type, 2);
  /* PetscTryMethod dispatches to TaoADMMSetRegularizerType_ADMM (registered in
     TaoCreate_ADMM) and is a no-op if tao is not of type TAOADMM */
  PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
  PetscFunctionReturn(0);
}
1242 
1243 /*@
1244    TaoADMMGetRegularizerType - Gets the type of regularizer routine for ADMM
1245 
1246    Not Collective
1247 
1248    Input Parameter:
1249 .  tao - the Tao context
1250 
1251    Output Parameter:
1252 .  type - the type of regularizer
1253 
1254    Level: intermediate
1255 
1256 .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1257 @*/
1258 PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type) {
1259   PetscFunctionBegin;
1260   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1261   PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1262   PetscFunctionReturn(0);
1263 }
1264 
/*@
  TaoADMMSetUpdateType - Set update routine for ADMM routine

  Not Collective

  Input Parameters:
+ tao  - the Tao context
- type - spectral parameter update type

  Level: intermediate

.seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(tao, type, 2);
  /* PetscTryMethod dispatches to TaoADMMSetUpdateType_ADMM (registered in
     TaoCreate_ADMM) and is a no-op if tao is not of type TAOADMM */
  PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
  PetscFunctionReturn(0);
}
1285 
1286 /*@
1287    TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for ADMM
1288 
1289    Not Collective
1290 
1291    Input Parameter:
1292 .  tao - the Tao context
1293 
1294    Output Parameter:
1295 .  type - the type of spectral penalty update routine
1296 
1297    Level: intermediate
1298 
1299 .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1300 @*/
1301 PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type) {
1302   PetscFunctionBegin;
1303   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
1304   PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1305   PetscFunctionReturn(0);
1306 }
1307