Actual source code: admm.c
1: #include <../src/tao/constrained/impls/admm/admm.h>
2: #include <petsctao.h>
3: #include <petsc/private/petscimpl.h>
5: /* Updates terminating criteria
6: *
7: * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
8: *
9: * 2. Updates dual residual, d_k
10: *
11: * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty|| */
13: static PetscBool cited = PETSC_FALSE;
14: static const char citation[] = "@misc{xu2017adaptive,\n"
15: " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
16: " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
17: " year={2017},\n"
18: " eprint={1704.02712},\n"
19: " archivePrefix={arXiv},\n"
20: " primaryClass={cs.CV}\n"
21: "} \n";
23: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
24: const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
25: const char *const TaoALMMTypes[] = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};
27: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
28: {
29: TAO_ADMM *am = (TAO_ADMM *)tao->data;
30: PetscReal Axnorm, Bznorm, ATynorm, temp;
31: Vec tempJR, tempL;
32: Tao mis;
34: mis = am->subsolverX;
35: tempJR = am->workJacobianRight;
36: tempL = am->workLeft;
37: /* ATy */
38: TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre);
39: MatMultTranspose(mis->jacobian_equality, am->y, tempJR);
40: VecNorm(tempJR, NORM_2, &ATynorm);
41: /* dualres = mu * ||AT(Bz-Bzold)||_2 */
42: VecWAXPY(tempJR, -1., am->Bzold, am->Bz);
43: MatMultTranspose(mis->jacobian_equality, tempJR, tempL);
44: VecNorm(tempL, NORM_2, &am->dualres);
45: am->dualres *= am->mu;
47: /* ||Ax||_2, ||Bz||_2 */
48: VecNorm(am->Ax, NORM_2, &Axnorm);
49: VecNorm(am->Bz, NORM_2, &Bznorm);
51: /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} *
52: * Set gatol to be gatol_admm * ||A^Ty|| *
53: * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
54: temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
55: TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT);
56: TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT);
57: return 0;
58: }
60: /* Penalty Update for Adaptive ADMM. */
61: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
62: {
63: TAO_ADMM *am = (TAO_ADMM *)tao->data;
64: PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
65: PetscBool hflag, gflag;
66: Vec tempJR, tempJR2;
68: tempJR = am->workJacobianRight;
69: tempJR2 = am->workJacobianRight2;
70: hflag = PETSC_FALSE;
71: gflag = PETSC_FALSE;
72: a_k = -1;
73: b_k = -1;
75: VecWAXPY(tempJR, -1., am->Axold, am->Ax);
76: VecWAXPY(tempJR2, -1., am->yhatold, am->yhat);
77: VecNorm(tempJR, NORM_2, &Axdiff_norm);
78: VecNorm(tempJR2, NORM_2, &yhatdiff_norm);
79: VecDot(tempJR, tempJR2, &Axyhat);
81: VecWAXPY(tempJR, -1., am->Bz0, am->Bz);
82: VecWAXPY(tempJR2, -1., am->y, am->y0);
83: VecNorm(tempJR, NORM_2, &Bzdiff_norm);
84: VecNorm(tempJR2, NORM_2, &ydiff_norm);
85: VecDot(tempJR, tempJR2, &Bzy);
87: if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
88: hflag = PETSC_TRUE;
89: a_sd = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
90: a_mg = Axyhat / PetscSqr(Axdiff_norm); /* alphaMG */
91: a_k = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
92: }
93: if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
94: gflag = PETSC_TRUE;
95: b_sd = PetscSqr(ydiff_norm) / Bzy; /* betaSD */
96: b_mg = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
97: b_k = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
98: }
99: am->muold = am->mu;
100: if (gflag && hflag) {
101: am->mu = PetscSqrtReal(a_k * b_k);
102: } else if (hflag) {
103: am->mu = a_k;
104: } else if (gflag) {
105: am->mu = b_k;
106: }
107: if (am->mu > am->muold) am->mu = am->muold;
108: if (am->mu < am->mumin) am->mu = am->mumin;
109: return 0;
110: }
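/* Worked example of the stepsize rule above (illustrative numbers, assuming the spectral
 * curvature estimates of Xu et al. 2017): with Axdiff_norm = 2, yhatdiff_norm = 1, Axyhat = 1.5,
 * the safeguard 1.5 > orthval*2*1 = 0.4 passes, and
 *
 *   a_sd = yhatdiff_norm^2 / Axyhat = 1 / 1.5 ~= 0.667   (steepest-descent estimate)
 *   a_mg = Axyhat / Axdiff_norm^2   = 1.5 / 4  = 0.375   (minimum-gradient estimate)
 *
 * Since a_mg / a_sd ~= 0.56 > 0.5, the hybrid rule picks a_k = a_mg = 0.375; otherwise it would
 * use a_sd - 0.5*a_mg.  The same rule gives b_k from the (Bz, y) differences, and the new penalty
 * is mu = sqrt(a_k*b_k) when both estimates are accepted (a_k or b_k alone otherwise), clipped so
 * that mumin <= mu <= muold. */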
112: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
113: {
114: TAO_ADMM *am = (TAO_ADMM *)tao->data;
116: am->regswitch = type;
117: return 0;
118: }
120: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
121: {
122: TAO_ADMM *am = (TAO_ADMM *)tao->data;
124: *type = am->regswitch;
125: return 0;
126: }
128: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
129: {
130: TAO_ADMM *am = (TAO_ADMM *)tao->data;
132: am->update = type;
133: return 0;
134: }
136: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
137: {
138: TAO_ADMM *am = (TAO_ADMM *)tao->data;
140: *type = am->update;
141: return 0;
142: }
144: /* This routine updates Jacobians with new x,z vectors,
145: * and then updates the Ax and Bz vectors and computes the updated residual vector */
146: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
147: {
148: TAO_ADMM *am = (TAO_ADMM *)tao->data;
149: Tao mis, reg;
151: mis = am->subsolverX;
152: reg = am->subsolverZ;
153: TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre);
154: MatMult(mis->jacobian_equality, x, Ax);
155: TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre);
156: MatMult(reg->jacobian_equality, z, Bz);
158: VecWAXPY(residual, 1., Bz, Ax);
159: if (am->constraint != NULL) VecAXPY(residual, -1., am->constraint);
160: return 0;
161: }
163: /* Updates the augmented Lagrangian for the given routines.
164:  * For subsolverX, the routine needs to be ComputeObjectiveAndGradient.
165:  * Separate Objective and Gradient routines are not supported. */
166: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
167: {
168: Tao parent = (Tao)ptr;
169: TAO_ADMM *am = (TAO_ADMM *)parent->data;
170: PetscReal temp, temp2;
171: Vec tempJR;
173: tempJR = am->workJacobianRight;
174: ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
175: (*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP);
177: am->last_misfit_val = *f;
178: /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
179: VecTDot(am->residual, am->y, &temp);
180: VecTDot(am->residual, am->residual, &temp2);
181: *f += temp + (am->mu / 2) * temp2;
183: /* Gradient: add mu*A^T(Ax+Bz-c) + A^T y */
184: MatMultTranspose(tao->jacobian_equality, am->residual, tempJR);
185: VecAXPY(g, am->mu, tempJR);
186: MatMultTranspose(tao->jacobian_equality, am->y, tempJR);
187: VecAXPY(g, 1., tempJR);
188: return 0;
189: }
191: /* Updates the augmented Lagrangian for the given routines.
192:  * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient.
193:  * Separate Objective and Gradient routines are not supported. */
194: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
195: {
196: Tao parent = (Tao)ptr;
197: TAO_ADMM *am = (TAO_ADMM *)parent->data;
198: PetscReal temp, temp2;
199: Vec tempJR;
201: tempJR = am->workJacobianRight;
202: ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual);
203: (*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP);
204: am->last_reg_val = *f;
205: /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
206: VecTDot(am->residual, am->y, &temp);
207: VecTDot(am->residual, am->residual, &temp2);
208: *f += temp + (am->mu / 2) * temp2;
210: /* Gradient: add mu*B^T(Ax+Bz-c) + B^T y */
211: MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR);
212: VecAXPY(g, am->mu, tempJR);
213: MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR);
214: VecAXPY(g, 1., tempJR);
215: return 0;
216: }
218: /* Computes epsilon padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
219: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
220: {
221: TAO_ADMM *am = (TAO_ADMM *)tao->data;
222: PetscInt N;
224: VecGetSize(am->workLeft, &N);
225: VecPointwiseMult(am->workLeft, x, x);
226: VecShift(am->workLeft, am->l1epsilon * am->l1epsilon);
227: VecSqrtAbs(am->workLeft);
228: VecSum(am->workLeft, norm);
229: *norm += N * am->l1epsilon;
230: *norm *= am->lambda;
231: return 0;
232: }
234: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
235: {
236: TAO_ADMM *am = (TAO_ADMM *)ptr;
238: switch (am->update) {
239: case (TAO_ADMM_UPDATE_BASIC):
240: break;
241: case (TAO_ADMM_UPDATE_ADAPTIVE):
242: case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
243: if (H && (am->muold != am->mu)) {
244: if (!Identity) {
245: MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN);
246: } else {
247: MatShift(H, am->mu - am->muold);
248: }
249: }
250: break;
251: }
252: return 0;
253: }
255: /* Updates Hessian - adds second derivative of augmented Lagrangian
256:  * H \gets H + \mu*ATA
257:  * Here, \mu does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
258:  * For ADAPTIVE, ADAPTIVE_RELAXED,
259:  * H \gets H + (\mu-\muold)*ATA
260:  * Here, we assume that A is a linear constraint, i.e., it doesn't change.
261:  * Thus, for both ADAPTIVE and ADAPTIVE_RELAXED, the ATA matrix is pre-set (except for A=I (NULL case)); see TaoSetUp_ADMM */
262: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
263: {
264: Tao parent = (Tao)ptr;
265: TAO_ADMM *am = (TAO_ADMM *)parent->data;
267: if (am->Hxchange) {
268: /* Case where Hessian gets updated with respect to x vector input. */
269: (*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP);
270: ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am);
271: } else if (am->Hxbool) {
272: /* Hessian doesn't get updated. H(x) = c */
273: /* Update Lagrangian only once per TAO call */
274: ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am);
275: am->Hxbool = PETSC_FALSE;
276: }
277: return 0;
278: }
280: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
281: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
282: {
283: Tao parent = (Tao)ptr;
284: TAO_ADMM *am = (TAO_ADMM *)parent->data;
287: if (am->Hzchange) {
288: /* Case where Hessian gets updated with respect to z vector input. */
289: (*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP);
290: ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am);
291: } else if (am->Hzbool) {
293: /* Hessian doesn't get updated. H(z) = c */
294: /* Update Lagrangian only once per TAO call */
294: ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am);
295: am->Hzbool = PETSC_FALSE;
296: }
297: return 0;
298: }
300: /* Shell Matrix routine for A matrix.
301: * This gets used when the user puts NULL for
302: * TaoADMMSetMisfitConstraintJacobian(tao, NULL, NULL, ...)
303: * Essentially sets A=I*/
304: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
305: {
306: VecCopy(in, out);
307: return 0;
308: }
310: /* Shell Matrix routine for B matrix.
311: * This gets used when user puts NULL for
312: * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
313: * Sets B=-I */
314: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
315: {
316: VecCopy(in, out);
317: VecScale(out, -1.);
318: return 0;
319: }
321: /* Solve min_{x,z} f(x) + g(z) s.t. Ax + Bz = c */
322: static PetscErrorCode TaoSolve_ADMM(Tao tao)
323: {
324: TAO_ADMM *am = (TAO_ADMM *)tao->data;
325: PetscInt N;
326: PetscReal reg_func;
327: PetscBool is_reg_shell;
328: Vec tempL;
330: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
333: if (am->constraint != NULL) VecNorm(am->constraint, NORM_2, &am->const_norm);
334: }
335: tempL = am->workLeft;
336: VecGetSize(tempL, &N);
338: if (am->Hx && am->ops->misfithess) TaoSetHessian(am->subsolverX, am->Hx, am->Hxpre, SubHessianUpdate, tao);
340: if (!am->zJI) {
341: /* Currently, B is assumed to be a constant (linear) operator, i.e., it does not get updated */
342: MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->BTB));
343: }
344: if (!am->xJI) {
345: /* Currently, A is assumed to be a constant (linear) operator, i.e., it does not get updated */
346: MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->ATA));
347: }
349: is_reg_shell = PETSC_FALSE;
351: PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell);
353: if (!is_reg_shell) {
354: switch (am->regswitch) {
355: case (TAO_ADMM_REGULARIZER_USER):
356: break;
357: case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
358: /* Soft Threshold. */
359: break;
360: }
361: if (am->ops->regobjgrad) TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao);
362: if (am->Hz && am->ops->reghess) TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao);
363: }
365: switch (am->update) {
366: case TAO_ADMM_UPDATE_BASIC:
367: if (am->subsolverX->hessian) {
368: /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty.
369:  * Here, when A is user-set (i.e., !am->xJI), add mu*ATA to the Hessian; otherwise (A = I) shift it by mu */
370: if (!am->xJI) {
371: MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN);
372: } else {
373: MatShift(am->subsolverX->hessian, am->mu);
374: }
375: }
376: if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
377: if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
378: MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN);
379: } else {
380: MatShift(am->subsolverZ->hessian, am->mu);
381: }
382: }
383: break;
384: case TAO_ADMM_UPDATE_ADAPTIVE:
385: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
386: break;
387: }
389: PetscCitationsRegister(citation, &cited);
390: tao->reason = TAO_CONTINUE_ITERATING;
392: while (tao->reason == TAO_CONTINUE_ITERATING) {
393: PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
394: VecCopy(am->Bz, am->Bzold);
396: /* x update */
397: TaoSolve(am->subsolverX);
398: TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre);
399: MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax);
401: am->Hxbool = PETSC_TRUE;
403: /* z update */
404: switch (am->regswitch) {
405: case TAO_ADMM_REGULARIZER_USER:
406: TaoSolve(am->subsolverZ);
407: break;
408: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
409: /* L1 assumes the A,B Jacobians are identity n x n matrices */
410: VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax);
411: TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution);
412: break;
413: }
414: am->Hzbool = PETSC_TRUE;
415: /* Returns Ax + Bz - c with updated Ax,Bz vectors */
416: ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
417: /* Dual variable update: y = yold + mu*(Ax+Bz-c) */
418: VecWAXPY(am->y, am->mu, am->residual, am->yold);
420: /* stopping tolerance update */
421: TaoADMMToleranceUpdate(tao);
423: /* Updating Spectral Penalty */
424: switch (am->update) {
425: case TAO_ADMM_UPDATE_BASIC:
426: am->muold = am->mu;
427: break;
428: case TAO_ADMM_UPDATE_ADAPTIVE:
429: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
430: if (tao->niter == 0) {
431: VecCopy(am->y, am->y0);
432: VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
433: if (am->constraint) VecAXPY(am->residual, -1., am->constraint);
434: VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold);
435: VecCopy(am->Ax, am->Axold);
436: VecCopy(am->Bz, am->Bz0);
437: am->muold = am->mu;
438: } else if (tao->niter % am->T == 1) {
439: /* we computed Bzold in a previous iteration, and Ax above */
440: VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
441: if (am->constraint) VecAXPY(am->residual, -1., am->constraint);
442: VecWAXPY(am->yhat, -am->mu, am->residual, am->yold);
443: AdaptiveADMMPenaltyUpdate(tao);
444: VecCopy(am->Ax, am->Axold);
445: VecCopy(am->Bz, am->Bz0);
446: VecCopy(am->yhat, am->yhatold);
447: VecCopy(am->y, am->y0);
448: } else {
449: am->muold = am->mu;
450: }
451: break;
452: default:
453: break;
454: }
455: tao->niter++;
457: /* Calculate original function values. The misfit part was stored by SubObjGradUpdate during the x update */
458: switch (am->regswitch) {
459: case TAO_ADMM_REGULARIZER_USER:
460: if (is_reg_shell) {
461: ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func);
462: } else {
463: (*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP);
464: }
465: break;
466: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
467: ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func);
468: break;
469: }
470: VecCopy(am->y, am->yold);
471: ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
472: VecNorm(am->residual, NORM_2, &am->resnorm);
473: TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its);
475: TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0);
476: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
477: }
478: /* Update vectors */
479: VecCopy(am->subsolverX->solution, tao->solution);
480: VecCopy(am->subsolverX->gradient, tao->gradient);
481: PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL);
482: PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL);
483: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL);
484: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL);
485: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL);
486: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL);
487: return 0;
488: }
490: static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject)
491: {
492: TAO_ADMM *am = (TAO_ADMM *)tao->data;
494: PetscOptionsHeadBegin(PetscOptionsObject, "ADMM problem that solves f(x) in the form of f(x) + g(z) subject to x - z = 0. Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
495: PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL);
496: PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL);
497: PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL);
498: PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL);
499: PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL);
500: PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL);
501: PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL);
502: PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL);
503: PetscOptionsHeadEnd();
504: TaoSetFromOptions(am->subsolverX);
505: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) TaoSetFromOptions(am->subsolverZ);
506: return 0;
507: }
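/* The options registered above can also be given on the command line of a program using this
 * solver; the program name below is hypothetical and the values are illustrative only:
 *
 *   ./myprog -tao_type admm -tao_admm_spectral_penalty 10.0 -tao_admm_regularizer_coefficient 1.e-3
 *
 * Since the subsolvers are created with the "misfit_" and "reg_" options prefixes (see
 * TaoCreate_ADMM() below), their own options take those prefixes, e.g. -misfit_tao_type nls. */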
509: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
510: {
511: TAO_ADMM *am = (TAO_ADMM *)tao->data;
513: PetscViewerASCIIPushTab(viewer);
514: TaoView(am->subsolverX, viewer);
515: TaoView(am->subsolverZ, viewer);
516: PetscViewerASCIIPopTab(viewer);
517: return 0;
518: }
520: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
521: {
522: TAO_ADMM *am = (TAO_ADMM *)tao->data;
523: PetscInt n, N, M;
525: VecGetLocalSize(tao->solution, &n);
526: VecGetSize(tao->solution, &N);
527: /* If the Jacobian is given as NULL, it is taken to be the identity matrix with the size of the solution vector */
528: if (!am->JB) {
529: am->zJI = PETSC_TRUE;
530: MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB);
531: MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB);
532: MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB);
533: am->JBpre = am->JB;
534: }
535: if (!am->JA) {
536: am->xJI = PETSC_TRUE;
537: MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA);
538: MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity);
539: MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity);
540: am->JApre = am->JA;
541: }
542: MatCreateVecs(am->JA, NULL, &am->Ax);
543: if (!tao->gradient) VecDuplicate(tao->solution, &tao->gradient);
544: TaoSetSolution(am->subsolverX, tao->solution);
545: if (!am->z) {
546: VecDuplicate(tao->solution, &am->z);
547: VecSet(am->z, 0.0);
548: }
549: TaoSetSolution(am->subsolverZ, am->z);
550: if (!am->workLeft) VecDuplicate(tao->solution, &am->workLeft);
551: if (!am->Axold) VecDuplicate(am->Ax, &am->Axold);
552: if (!am->workJacobianRight) VecDuplicate(am->Ax, &am->workJacobianRight);
553: if (!am->workJacobianRight2) VecDuplicate(am->Ax, &am->workJacobianRight2);
554: if (!am->Bz) VecDuplicate(am->Ax, &am->Bz);
555: if (!am->Bzold) VecDuplicate(am->Ax, &am->Bzold);
556: if (!am->Bz0) VecDuplicate(am->Ax, &am->Bz0);
557: if (!am->y) {
558: VecDuplicate(am->Ax, &am->y);
559: VecSet(am->y, 0.0);
560: }
561: if (!am->yold) {
562: VecDuplicate(am->Ax, &am->yold);
563: VecSet(am->yold, 0.0);
564: }
565: if (!am->y0) {
566: VecDuplicate(am->Ax, &am->y0);
567: VecSet(am->y0, 0.0);
568: }
569: if (!am->yhat) {
570: VecDuplicate(am->Ax, &am->yhat);
571: VecSet(am->yhat, 0.0);
572: }
573: if (!am->yhatold) {
574: VecDuplicate(am->Ax, &am->yhatold);
575: VecSet(am->yhatold, 0.0);
576: }
577: if (!am->residual) {
578: VecDuplicate(am->Ax, &am->residual);
579: VecSet(am->residual, 0.0);
580: }
581: if (!am->constraint) {
582: am->constraint = NULL;
583: } else {
584: VecGetSize(am->constraint, &M);
586: }
588: /* Save changed tao tolerance for adaptive tolerance */
589: if (tao->gatol_changed) am->gatol_admm = tao->gatol;
590: if (tao->catol_changed) am->catol_admm = tao->catol;
592: /*Update spectral and dual elements to X subsolver */
593: TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao);
594: TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP);
595: TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP);
596: return 0;
597: }
599: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
600: {
601: TAO_ADMM *am = (TAO_ADMM *)tao->data;
603: VecDestroy(&am->z);
604: VecDestroy(&am->Ax);
605: VecDestroy(&am->Axold);
606: VecDestroy(&am->Bz);
607: VecDestroy(&am->Bzold);
608: VecDestroy(&am->Bz0);
609: VecDestroy(&am->residual);
610: VecDestroy(&am->y);
611: VecDestroy(&am->yold);
612: VecDestroy(&am->y0);
613: VecDestroy(&am->yhat);
614: VecDestroy(&am->yhatold);
615: VecDestroy(&am->workLeft);
616: VecDestroy(&am->workJacobianRight);
617: VecDestroy(&am->workJacobianRight2);
619: MatDestroy(&am->JA);
620: MatDestroy(&am->JB);
621: if (!am->xJI) MatDestroy(&am->JApre);
622: if (!am->zJI) MatDestroy(&am->JBpre);
623: if (am->Hx) {
624: MatDestroy(&am->Hx);
625: MatDestroy(&am->Hxpre);
626: }
627: if (am->Hz) {
628: MatDestroy(&am->Hz);
629: MatDestroy(&am->Hzpre);
630: }
631: MatDestroy(&am->ATA);
632: MatDestroy(&am->BTB);
633: TaoDestroy(&am->subsolverX);
634: TaoDestroy(&am->subsolverZ);
635: am->parent = NULL;
636: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL);
637: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL);
638: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL);
639: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL);
640: PetscFree(tao->data);
641: return 0;
642: }
644: /*MC
646:   TAOADMM - Alternating direction method of multipliers method for solving problems with linear
647:   constraints, of the form min_{x,z} f(x) + g(z) s.t. Ax+Bz=c.
648:   This algorithm employs two sub-Tao solvers, whose types can be specified
649:   by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
650:   Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
651:   TaoADMMSet{Misfit,Regularizer}HessianChangeStatus().
652:   The second subsolver supports TAOSHELL. Note that the L1-norm is used for the objective value with the TAOSHELL type.
653:   There is an option to set the regularizer type, and currently soft-threshold is implemented. For the spectral penalty update,
654:   basic and adaptive options are currently available.
655:   The constraint is Ax+Bz=c, where A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian().
656:   c can be set with TaoADMMSetConstraintVectorRHS().
657:   The user can also provide the regularizer weight for the second subsolver.
659: References:
660: . * - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
661: "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
662: The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.
664: Options Database Keys:
665: + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-6)
666: . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.)
667: . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.)
668: . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12)
669: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
670: . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0)
671: . -tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
672: - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
674: Level: beginner
676: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
677: `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
678: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`,
679: `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
680: `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
681: `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
682: `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
683: `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
684: M*/
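/* A minimal usage sketch for the manual page above.  MisfitObjGrad, MisfitHessian, H, x, and
 * user are hypothetical user-provided callbacks/objects; only routines defined in this file plus
 * standard Tao calls are used, and the default soft-threshold regularizer is kept so no
 * regularizer callbacks are required:
 *
 *   Tao tao;
 *   TaoCreate(PETSC_COMM_WORLD, &tao);
 *   TaoSetType(tao, TAOADMM);
 *   TaoSetSolution(tao, x);                                              // user-created Vec
 *   TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, MisfitObjGrad, &user);
 *   TaoADMMSetMisfitHessianRoutine(tao, H, H, MisfitHessian, &user);     // user-created Mat
 *   TaoADMMSetRegularizerCoefficient(tao, 1.e-2);                        // lambda for soft-threshold
 *   TaoADMMSetSpectralPenalty(tao, 1.0);
 *   TaoADMMSetUpdateType(tao, TAO_ADMM_UPDATE_ADAPTIVE);
 *   TaoSetFromOptions(tao);
 *   TaoSolve(tao);
 *   TaoDestroy(&tao);
 *
 * With no constraint Jacobians or RHS vector set, A = I, B = -I, and c = 0 are used, i.e. the
 * consensus constraint x = z. */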
686: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
687: {
688: TAO_ADMM *am;
690: PetscNew(&am);
692: tao->ops->destroy = TaoDestroy_ADMM;
693: tao->ops->setup = TaoSetUp_ADMM;
694: tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
695: tao->ops->view = TaoView_ADMM;
696: tao->ops->solve = TaoSolve_ADMM;
698: tao->data = (void *)am;
699: am->l1epsilon = 1e-6;
700: am->lambda = 1e-4;
701: am->mu = 1.;
702: am->muold = 0.;
703: am->mueps = PETSC_MACHINE_EPSILON;
704: am->mumin = 0.;
705: am->orthval = 0.2;
706: am->T = 2;
707: am->parent = tao;
708: am->update = TAO_ADMM_UPDATE_BASIC;
709: am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH;
710: am->tol = PETSC_SMALL;
711: am->const_norm = 0;
712: am->resnorm = 0;
713: am->dualres = 0;
714: am->ops->regobjgrad = NULL;
715: am->ops->reghess = NULL;
716: am->gamma = 1;
717: am->regobjgradP = NULL;
718: am->reghessP = NULL;
719: am->gatol_admm = 1e-8;
720: am->catol_admm = 0;
721: am->Hxchange = PETSC_TRUE;
722: am->Hzchange = PETSC_TRUE;
723: am->Hzbool = PETSC_TRUE;
724: am->Hxbool = PETSC_TRUE;
726: TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX);
727: TaoSetOptionsPrefix(am->subsolverX, "misfit_");
728: PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1);
729: TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ);
730: TaoSetOptionsPrefix(am->subsolverZ, "reg_");
731: PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1);
733: TaoSetType(am->subsolverX, TAONLS);
734: TaoSetType(am->subsolverZ, TAONLS);
735: PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao);
736: PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao);
737: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM);
738: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM);
739: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM);
740: PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM);
741: return 0;
742: }
744: /*@
745: TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether Hessian matrix of misfit subsolver changes with respect to input vector.
747: Collective
749: Input Parameters:
750: + tao - the Tao solver context.
751: - b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.
753: Level: advanced
755: .seealso: `TAOADMM`
757: @*/
758: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
759: {
760: TAO_ADMM *am = (TAO_ADMM *)tao->data;
762: am->Hxchange = b;
763: return 0;
764: }
766: /*@
767: TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.
769: Collective
771: Input Parameters:
772: + tao - the Tao solver context
773: - b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.
775: Level: advanced
777: .seealso: `TAOADMM`
779: @*/
780: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
781: {
782: TAO_ADMM *am = (TAO_ADMM *)tao->data;
784: am->Hzchange = b;
785: return 0;
786: }
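/* When a Hessian is constant, the two change-status flags above let the solver skip re-calling
 * the user Hessian routine and only fold the augmented-Lagrangian term (mu*A^T A or mu*B^T B)
 * into it when the spectral penalty changes, e.g.
 *
 *   TaoADMMSetMisfitHessianChangeStatus(tao, PETSC_FALSE);  // misfit Hessian independent of x
 *   TaoADMMSetRegHessianChangeStatus(tao, PETSC_FALSE);     // regularizer Hessian independent of z
 */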
788: /*@
789: TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
791: Collective
793: Input Parameters:
794: + tao - the Tao solver context
795: - mu - spectral penalty
797: Level: advanced
799: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
800: @*/
801: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
802: {
803: TAO_ADMM *am = (TAO_ADMM *)tao->data;
805: am->mu = mu;
806: return 0;
807: }
809: /*@
810: TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
812: Collective
814: Input Parameter:
815: . tao - the Tao solver context
817: Output Parameter:
818: . mu - spectral penalty
820: Level: advanced
822: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
823: @*/
824: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
825: {
826: TAO_ADMM *am = (TAO_ADMM *)tao->data;
830: *mu = am->mu;
831: return 0;
832: }
834: /*@
835: TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside ADMM
837: Collective
839: Input Parameter:
840: . tao - the Tao solver context
842: Output Parameter:
843: . misfit - the Tao subsolver context
845: Level: advanced
847: .seealso: `TAOADMM`
849: @*/
850: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
851: {
852: TAO_ADMM *am = (TAO_ADMM *)tao->data;
854: *misfit = am->subsolverX;
855: return 0;
856: }
858: /*@
859: TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside ADMM
861: Collective
863: Input Parameter:
864: . tao - the Tao solver context
866: Output Parameter:
867: . reg - the Tao subsolver context
869: Level: advanced
871: .seealso: `TAOADMM`
873: @*/
874: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
875: {
876: TAO_ADMM *am = (TAO_ADMM *)tao->data;
878: *reg = am->subsolverZ;
879: return 0;
880: }
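/* The two getters above expose the inner solvers so they can be configured directly; the type
 * and tolerance below are illustrative choices only:
 *
 *   Tao misfit, reg;
 *   TaoADMMGetMisfitSubsolver(tao, &misfit);
 *   TaoADMMGetRegularizationSubsolver(tao, &reg);
 *   TaoSetType(misfit, TAOBQNLS);                               // override the default TAONLS
 *   TaoSetTolerances(reg, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT);
 */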
882: /*@
883: TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for ADMM
885: Collective
887: Input Parameters:
888: + tao - the Tao solver context
889: - c - RHS vector
891: Level: advanced
893: .seealso: `TAOADMM`
895: @*/
896: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
897: {
898: TAO_ADMM *am = (TAO_ADMM *)tao->data;
900: am->constraint = c;
901: return 0;
902: }
904: /*@
905: TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
907: Collective
909: Input Parameters:
910: + tao - the Tao solver context
911: - mu - minimum spectral penalty value
913: Level: advanced
915: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
916: @*/
917: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
918: {
919: TAO_ADMM *am = (TAO_ADMM *)tao->data;
921: am->mumin = mu;
922: return 0;
923: }
925: /*@
926: TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case
928: Collective
930: Input Parameters:
931: + tao - the Tao solver context
932: - lambda - L1-norm regularizer coefficient
934: Level: advanced
936: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
938: @*/
939: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
940: {
941: TAO_ADMM *am = (TAO_ADMM *)tao->data;
943: am->lambda = lambda;
944: return 0;
945: }
947: /*@C
948: TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the ADMM algorithm. Matrix A constrains the x variable.
950: Collective
952: Input Parameters:
953: + tao - the Tao solver context
954: . J - user-created misfit constraint Jacobian matrix
955: . Jpre - user-created misfit constraint Jacobian preconditioner matrix
956: . func - function pointer for the misfit constraint Jacobian update function
957: - ctx - user context for the misfit constraint Jacobian
959: Level: advanced
961: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
963: @*/
964: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
965: {
966: TAO_ADMM *am = (TAO_ADMM *)tao->data;
969: if (J) {
972: }
973: if (Jpre) {
976: }
977: if (ctx) am->misfitjacobianP = ctx;
978: if (func) am->ops->misfitjac = func;
980: if (J) {
981: PetscObjectReference((PetscObject)J);
982: MatDestroy(&am->JA);
983: am->JA = J;
984: }
985: if (Jpre) {
986: PetscObjectReference((PetscObject)Jpre);
987: MatDestroy(&am->JApre);
988: am->JApre = Jpre;
989: }
990: return 0;
991: }
993: /*@C
994: TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the ADMM algorithm. Matrix B constrains the z variable.
996: Collective
998: Input Parameters:
999: + tao - the Tao solver context
1000: . J - user-created regularizer constraint Jacobian matrix
1001: . Jpre - user-created regularizer constraint Jacobian preconditioner matrix
1002: . func - function pointer for the regularizer constraint Jacobian update function
1003: - ctx - user context for the regularizer constraint Jacobian
1005: Level: advanced
1007: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1009: @*/
1010: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1011: {
1012: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1015: if (J) {
1018: }
1019: if (Jpre) {
1022: }
1023: if (ctx) am->regjacobianP = ctx;
1024: if (func) am->ops->regjac = func;
1026: if (J) {
1027: PetscObjectReference((PetscObject)J);
1028: MatDestroy(&am->JB);
1029: am->JB = J;
1030: }
1031: if (Jpre) {
1032: PetscObjectReference((PetscObject)Jpre);
1033: MatDestroy(&am->JBpre);
1034: am->JBpre = Jpre;
1035: }
1036: return 0;
1037: }
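/* A sketch of a generalized-constraint setup using the two setters above together with
 * TaoADMMSetConstraintVectorRHS().  Amat, Bmat, and cvec are assumed user-created objects, and
 * ConstJac is a hypothetical update callback that does nothing because the Jacobians are constant:
 *
 *   static PetscErrorCode ConstJac(Tao tao, Vec v, Mat J, Mat Jpre, void *ctx)
 *   {
 *     return 0;  // A and B are constant; nothing to recompute
 *   }
 *
 *   TaoADMMSetMisfitConstraintJacobian(tao, Amat, Amat, ConstJac, NULL);       // A acts on x
 *   TaoADMMSetRegularizerConstraintJacobian(tao, Bmat, Bmat, ConstJac, NULL);  // B acts on z
 *   TaoADMMSetConstraintVectorRHS(tao, cvec);                                  // c in Ax + Bz = c
 */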
1039: /*@C
1040: TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1042: Collective
1044: Input Parameters:
1045: + tao - the Tao context
1046: . func - function pointer for the misfit value and gradient evaluation
1047: - ctx - user context for the misfit
1049: Level: advanced
1051: .seealso: `TAOADMM`
1053: @*/
1054: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1055: {
1056: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1059: am->misfitobjgradP = ctx;
1060: am->ops->misfitobjgrad = func;
1061: return 0;
1062: }
1064: /*@C
1065: TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1066: function into the algorithm, to be used for subsolverX.
1068: Collective
1070: Input Parameters:
1071: + tao - the Tao context
1072: . H - user-created matrix for the Hessian of the misfit term
1073: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1074: . func - function pointer for the misfit Hessian evaluation
1075: - ctx - user context for the misfit Hessian
1077: Level: advanced
1079: .seealso: `TAOADMM`
1081: @*/
1082: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1083: {
1084: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1087: if (H) {
1090: }
1091: if (Hpre) {
1094: }
1095: if (ctx) am->misfithessP = ctx;
1096: if (func) am->ops->misfithess = func;
1097: if (H) {
1098: PetscObjectReference((PetscObject)H);
1099: MatDestroy(&am->Hx);
1100: am->Hx = H;
1101: }
1102: if (Hpre) {
1103: PetscObjectReference((PetscObject)Hpre);
1104: MatDestroy(&am->Hxpre);
1105: am->Hxpre = Hpre;
1106: }
1107: return 0;
1108: }
1110: /*@C
1111: TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1113: Collective
1115: Input Parameters:
1116: + tao - the Tao context
1117: . func - function pointer for the regularizer value and gradient evaluation
1118: - ctx - user context for the regularizer
1120: Level: advanced
1122: .seealso: `TAOADMM`
1124: @*/
1125: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1126: {
1127: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1130: am->regobjgradP = ctx;
1131: am->ops->regobjgrad = func;
1132: return 0;
1133: }
1135: /*@C
1136: TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1137: function, to be used for subsolverZ.
1139: Collective
1141: Input Parameters:
1142: + tao - the Tao context
1143: . H - user-created matrix for the Hessian of the regularization term
1144: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1145: . func - function pointer for the regularizer Hessian evaluation
1146: - ctx - user context for the regularizer Hessian
1148: Level: advanced
1150: .seealso: `TAOADMM`
1152: @*/
1153: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1154: {
1155: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1158: if (H) {
1161: }
1162: if (Hpre) {
1165: }
1166: if (ctx) am->reghessP = ctx;
1167: if (func) am->ops->reghess = func;
1168: if (H) {
1169: PetscObjectReference((PetscObject)H);
1170: MatDestroy(&am->Hz);
1171: am->Hz = H;
1172: }
1173: if (Hpre) {
1174: PetscObjectReference((PetscObject)Hpre);
1175: MatDestroy(&am->Hzpre);
1176: am->Hzpre = Hpre;
1177: }
1178: return 0;
1179: }
1181: /*@
1182: TaoGetADMMParentTao - Gets pointer to parent ADMM tao, used by inner subsolver.
1184: Collective
1186: Input Parameter:
1187: . tao - the Tao context
1189: Output Parameter:
1190: . admm_tao - the parent Tao context
1192: Level: advanced
1194: .seealso: `TAOADMM`
1196: @*/
1197: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1198: {
1200: PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao);
1201: return 0;
1202: }
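/* The typical caller is a user callback that receives one of the inner subsolvers and needs to
 * reach the outer ADMM solver.  UserMisfitObjGrad below is a hypothetical routine registered with
 * TaoADMMSetMisfitObjectiveAndGradientRoutine():
 *
 *   static PetscErrorCode UserMisfitObjGrad(Tao subsolver, Vec x, PetscReal *f, Vec g, void *ctx)
 *   {
 *     Tao       admm;
 *     PetscReal mu;
 *     TaoGetADMMParentTao(subsolver, &admm);  // subsolver was created by the outer ADMM Tao
 *     TaoADMMGetSpectralPenalty(admm, &mu);   // outer-solver data, e.g. for penalty-aware scaling
 *     VecDot(x, x, f);                        // toy misfit f(x) = 0.5*||x||^2
 *     *f *= 0.5;
 *     VecCopy(x, g);                          // its gradient
 *     return 0;
 *   }
 */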
1204: /*@
1205: TaoADMMGetDualVector - Returns the dual vector associated with the current TAOADMM state
1207: Not Collective
1209: Input Parameter:
1210: . tao - the Tao context
1212: Output Parameter:
1213: . Y - the dual vector (Lagrange multiplier) of the ADMM constraint
1215: Level: intermediate
1217: .seealso: `TAOADMM`
1219: @*/
1220: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1221: {
1222: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1225: *Y = am->y;
1226: return 0;
1227: }
1229: /*@
1230: TaoADMMSetRegularizerType - Set regularizer type for ADMM routine
1232: Not Collective
1234: Input Parameters:
1235: + tao - the Tao context
1236: - type - regularizer type
1238: Options Database Key:
1239: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer
1241: Level: intermediate
1243: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1244: @*/
1245: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1246: {
1249: PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1250: return 0;
1251: }
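/* A sketch of selecting the user regularizer, which requires supplying the regularizer callbacks
 * as well; RegObjGrad, RegHess, Hreg, and user are hypothetical:
 *
 *   TaoADMMSetRegularizerType(tao, TAO_ADMM_REGULARIZER_USER);
 *   TaoADMMSetRegularizerObjectiveAndGradientRoutine(tao, RegObjGrad, &user);
 *   TaoADMMSetRegularizerHessianRoutine(tao, Hreg, Hreg, RegHess, &user);
 *
 * With the default TAO_ADMM_REGULARIZER_SOFT_THRESH, the z update is done analytically and only
 * the coefficient from TaoADMMSetRegularizerCoefficient() is needed. */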
1253: /*@
1254: TaoADMMGetRegularizerType - Gets the type of regularizer routine for ADMM
1256: Not Collective
1258: Input Parameter:
1259: . tao - the Tao context
1261: Output Parameter:
1262: . type - the type of regularizer
1264: Level: intermediate
1266: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1267: @*/
1268: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1269: {
1271: PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1272: return 0;
1273: }
1275: /*@
1276: TaoADMMSetUpdateType - Set update routine for ADMM routine
1278: Not Collective
1280: Input Parameters:
1281: + tao - the Tao context
1282: - type - spectral parameter update type
1284: Level: intermediate
1286: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1287: @*/
1288: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1289: {
1292: PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1293: return 0;
1294: }
1296: /*@
1297: TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for ADMM
1299: Not Collective
1301: Input Parameter:
1302: . tao - the Tao context
1304: Output Parameter:
1305: . type - the type of spectral penalty update routine
1307: Level: intermediate
1309: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1310: @*/
1311: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1312: {
1314: PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1315: return 0;
1316: }