Actual source code: bnk.c

  1: #include <petsctaolinesearch.h>
  2: #include <../src/tao/bound/impls/bnk/bnk.h>
  3: #include <petscksp.h>

  5: static const char *BNK_INIT[64]   = {"constant", "direction", "interpolation"};
  6: static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"};
  7: static const char *BNK_AS[64]     = {"none", "bertsekas"};

  9: /* Extracts from the full Hessian the part associated with the current bnk->inactive_idx and sets the PCLMVM preconditioner */
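     /* Note: the previously stored sub-matrices are destroyed on every call; when bounds are active,
        fresh ones are created with MAT_INITIAL_MATRIX, and when no bounds are active the full Hessian
        and preconditioner are referenced directly, so no copies are made. */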

 11: static PetscErrorCode TaoBNKComputeSubHessian(Tao tao)
 12: {
 13:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

 15:   MatDestroy(&bnk->Hpre_inactive);
 16:   MatDestroy(&bnk->H_inactive);
 17:   if (bnk->active_idx) {
 18:     MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);
 19:     if (tao->hessian == tao->hessian_pre) {
 20:       PetscObjectReference((PetscObject)bnk->H_inactive);
 21:       bnk->Hpre_inactive = bnk->H_inactive;
 22:     } else {
 23:       MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive);
 24:     }
 25:     if (bnk->bfgs_pre) PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx);
 26:   } else {
 27:     PetscObjectReference((PetscObject)tao->hessian);
 28:     bnk->H_inactive = tao->hessian;
 29:     PetscObjectReference((PetscObject)tao->hessian_pre);
 30:     bnk->Hpre_inactive = tao->hessian_pre;
 31:     if (bnk->bfgs_pre) PCLMVMClearIS(bnk->bfgs_pre);
 32:   }
 33:   return 0;
 34: }

 36: /* Initializes the KSP solver, the BFGS preconditioner, and the initial trust radius estimation */

 38: PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH)
 39: {
 40:   TAO_BNK          *bnk = (TAO_BNK *)tao->data;
 41:   PC                pc;
 42:   PetscReal         f_min, ftrial, prered, actred, kappa, sigma, resnorm;
 43:   PetscReal         tau, tau_1, tau_2, tau_max, tau_min, max_radius;
 44:   PetscBool         is_bfgs, is_jacobi, is_symmetric, sym_set;
 45:   PetscInt          n, N, nDiff;
 46:   PetscInt          i_max = 5;
 47:   PetscInt          j_max = 1;
 48:   PetscInt          i, j;
 49:   PetscVoidFunction kspTR;

 51:   /* Project the current point onto the feasible set */
 52:   TaoComputeVariableBounds(tao);
 53:   TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU);
 54:   if (tao->bounded) TaoLineSearchSetVariableBounds(tao->linesearch, tao->XL, tao->XU);

 56:   /* Project the initial point onto the feasible region */
 57:   TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);

 59:   /* Check convergence criteria */
 60:   TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);
 61:   TaoBNKEstimateActiveSet(tao, bnk->as_type);
 62:   VecCopy(bnk->unprojected_gradient, tao->gradient);
 63:   VecISSet(tao->gradient, bnk->active_idx, 0.0);
 64:   TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);

 66:   /* Test the initial point for convergence */
 67:   VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
 68:   VecNorm(bnk->W, NORM_2, &resnorm);
 70:   TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its);
 71:   TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0);
 72:   PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
 73:   if (tao->reason != TAO_CONTINUE_ITERATING) return 0;

 75:   /* Reset KSP stopping reason counters */
 76:   bnk->ksp_atol = 0;
 77:   bnk->ksp_rtol = 0;
 78:   bnk->ksp_dtol = 0;
 79:   bnk->ksp_ctol = 0;
 80:   bnk->ksp_negc = 0;
 81:   bnk->ksp_iter = 0;
 82:   bnk->ksp_othr = 0;

 84:   /* Reset accepted step type counters */
 85:   bnk->tot_cg_its = 0;
 86:   bnk->newt       = 0;
 87:   bnk->bfgs       = 0;
 88:   bnk->sgrad      = 0;
 89:   bnk->grad       = 0;

 91:   /* Initialize the Hessian perturbation */
 92:   bnk->pert = bnk->sval;

 94:   /* Reset initial steplength to zero (this helps BNCG reset its direction internally) */
 95:   VecSet(tao->stepdirection, 0.0);

 97:   /* Allocate the vectors needed for the BFGS approximation */
 98:   KSPGetPC(tao->ksp, &pc);
 99:   PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs);
100:   PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi);
101:   if (is_bfgs) {
102:     bnk->bfgs_pre = pc;
103:     PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M);
104:     VecGetLocalSize(tao->solution, &n);
105:     VecGetSize(tao->solution, &N);
106:     MatSetSizes(bnk->M, n, n, N, N);
107:     MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient);
108:     MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric);
110:   } else if (is_jacobi) PCJacobiSetUseAbs(pc, PETSC_TRUE);
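       /* Note: when the preconditioner is PCLMVM, the same LMVM matrix (bnk->M) also serves as the
          quasi-Newton approximation used later for the BFGS fall-back steps and for the Bertsekas
          active-set estimation. */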

112:   /* Prepare the min/max vectors for safeguarding diagonal scales */
113:   VecSet(bnk->Diag_min, bnk->dmin);
114:   VecSet(bnk->Diag_max, bnk->dmax);

116:   /* Initialize trust-region radius.  The initialization is only performed
117:      when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */
118:   *needH = PETSC_TRUE;
119:   PetscObjectQueryFunction((PetscObject)tao->ksp, "KSPCGSetRadius_C", &kspTR);
120:   if (kspTR) {
121:     switch (initType) {
122:     case BNK_INIT_CONSTANT:
123:       /* Use the initial radius specified */
124:       tao->trust = tao->trust0;
125:       break;

127:     case BNK_INIT_INTERPOLATION:
128:       /* Use interpolation based on the initial Hessian */
129:       max_radius = 0.0;
130:       tao->trust = tao->trust0;
131:       for (j = 0; j < j_max; ++j) {
132:         f_min = bnk->f;
133:         sigma = 0.0;

135:         if (*needH) {
136:           /* Compute the Hessian at the new step, and extract the inactive subsystem */
137:           (*bnk->computehessian)(tao);
138:           TaoBNKEstimateActiveSet(tao, BNK_AS_NONE);
139:           TaoBNKComputeSubHessian(tao);
140:           *needH = PETSC_FALSE;
141:         }

143:         for (i = 0; i < i_max; ++i) {
144:           /* Take a steepest descent step and snap it to bounds */
145:           VecCopy(tao->solution, bnk->Xold);
146:           VecAXPY(tao->solution, -tao->trust / bnk->gnorm, tao->gradient);
147:           TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);
148:           /* Compute the step we actually accepted */
149:           VecCopy(tao->solution, bnk->W);
150:           VecAXPY(bnk->W, -1.0, bnk->Xold);
151:           /* Compute the objective at the trial */
152:           TaoComputeObjective(tao, tao->solution, &ftrial);
154:           VecCopy(bnk->Xold, tao->solution);
155:           if (PetscIsInfOrNanReal(ftrial)) {
156:             tau = bnk->gamma1_i;
157:           } else {
158:             if (ftrial < f_min) {
159:               f_min = ftrial;
160:               sigma = -tao->trust / bnk->gnorm;
161:             }

163:             /* Compute the predicted and actual reduction */
164:             if (bnk->active_idx) {
165:               VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
166:               VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
167:             } else {
168:               bnk->X_inactive    = bnk->W;
169:               bnk->inactive_work = bnk->Xwork;
170:             }
171:             MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
172:             VecDot(bnk->X_inactive, bnk->inactive_work, &prered);
173:             if (bnk->active_idx) {
174:               VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
175:               VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
176:             }
177:             prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm));
178:             actred = bnk->f - ftrial;
179:             if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
180:               kappa = 1.0;
181:             } else {
182:               kappa = actred / prered;
183:             }

185:             tau_1   = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred);
186:             tau_2   = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred);
187:             tau_min = PetscMin(tau_1, tau_2);
188:             tau_max = PetscMax(tau_1, tau_2);

190:             if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu1_i) {
191:               /*  Great agreement */
192:               max_radius = PetscMax(max_radius, tao->trust);

194:               if (tau_max < 1.0) {
195:                 tau = bnk->gamma3_i;
196:               } else if (tau_max > bnk->gamma4_i) {
197:                 tau = bnk->gamma4_i;
198:               } else {
199:                 tau = tau_max;
200:               }
201:             } else if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu2_i) {
202:               /*  Good agreement */
203:               max_radius = PetscMax(max_radius, tao->trust);

205:               if (tau_max < bnk->gamma2_i) {
206:                 tau = bnk->gamma2_i;
207:               } else if (tau_max > bnk->gamma3_i) {
208:                 tau = bnk->gamma3_i;
209:               } else {
210:                 tau = tau_max;
211:               }
212:             } else {
213:               /*  Not good agreement */
214:               if (tau_min > 1.0) {
215:                 tau = bnk->gamma2_i;
216:               } else if (tau_max < bnk->gamma1_i) {
217:                 tau = bnk->gamma1_i;
218:               } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) {
219:                 tau = bnk->gamma1_i;
220:               } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
221:                 tau = tau_1;
222:               } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
223:                 tau = tau_2;
224:               } else {
225:                 tau = tau_max;
226:               }
227:             }
228:           }
229:           tao->trust = tau * tao->trust;
230:         }

232:         if (f_min < bnk->f) {
233:           /* We accidentally found a solution better than the initial, so accept it */
234:           bnk->f = f_min;
235:           VecCopy(tao->solution, bnk->Xold);
236:           VecAXPY(tao->solution, sigma, tao->gradient);
237:           TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);
238:           VecCopy(tao->solution, tao->stepdirection);
239:           VecAXPY(tao->stepdirection, -1.0, bnk->Xold);
240:           TaoComputeGradient(tao, tao->solution, bnk->unprojected_gradient);
241:           TaoBNKEstimateActiveSet(tao, bnk->as_type);
242:           VecCopy(bnk->unprojected_gradient, tao->gradient);
243:           VecISSet(tao->gradient, bnk->active_idx, 0.0);
244:           /* Compute the gradient norm at the new iterate and flip the switch to recompute the Hessian later */
245:           TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);
246:           *needH = PETSC_TRUE;
247:           /* Test the new step for convergence */
248:           VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
249:           VecNorm(bnk->W, NORM_2, &resnorm);
251:           TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its);
252:           TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0);
253:           PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
254:           if (tao->reason != TAO_CONTINUE_ITERATING) return 0;
255:           /* activate BNCG recycling early because we have a step direction computed */
256:           TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE);
257:         }
258:       }
259:       tao->trust = PetscMax(tao->trust, max_radius);

261:       /* Ensure that the trust radius is within the limits */
262:       tao->trust = PetscMax(tao->trust, bnk->min_radius);
263:       tao->trust = PetscMin(tao->trust, bnk->max_radius);
264:       break;

266:     default:
267:       /* Norm of the first direction will initialize radius */
268:       tao->trust = 0.0;
269:       break;
270:     }
271:   }
272:   return 0;
273: }
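     /* Note on the interpolation branch above: each inner iteration takes a bound-projected
        steepest-descent trial step of length tao->trust, compares the actual reduction against the
        quadratic-model prediction (ratio kappa), and rescales the radius by tau accordingly. Any
        trial point that improves on the initial objective is accepted as the new iterate before the
        Newton iterations begin. */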

275: /*------------------------------------------------------------*/

277: /* Computes the exact Hessian and extracts its subHessian */

279: PetscErrorCode TaoBNKComputeHessian(Tao tao)
280: {
281:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

283:   /* Compute the Hessian */
284:   TaoComputeHessian(tao, tao->solution, tao->hessian, tao->hessian_pre);
285:   /* Add a correction to the BFGS preconditioner */
286:   if (bnk->M) MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
287:   /* Prepare the reduced sub-matrices for the inactive set */
288:   TaoBNKComputeSubHessian(tao);
289:   return 0;
290: }

292: /*------------------------------------------------------------*/

294: /* Routine for estimating the active set */

296: PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType)
297: {
298:   TAO_BNK  *bnk = (TAO_BNK *)tao->data;
299:   PetscBool hessComputed, diagExists, hadactive;

301:   hadactive = bnk->active_idx ? PETSC_TRUE : PETSC_FALSE;
302:   switch (asType) {
303:   case BNK_AS_NONE:
304:     ISDestroy(&bnk->inactive_idx);
305:     VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx);
306:     ISDestroy(&bnk->active_idx);
307:     ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx);
308:     break;

310:   case BNK_AS_BERTSEKAS:
311:     /* Compute the trial step vector with which we will estimate the active set at the next iteration */
312:     if (bnk->M) {
313:       /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */
314:       MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W);
315:     } else {
316:       hessComputed = diagExists = PETSC_FALSE;
317:       if (tao->hessian) MatAssembled(tao->hessian, &hessComputed);
318:       if (hessComputed) MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists);
319:       if (diagExists) {
320:         /* The BFGS preconditioner is not available, so apply the inverse of the safeguarded absolute Hessian diagonal to the gradient instead */
321:         MatGetDiagonal(tao->hessian, bnk->Xwork);
322:         VecAbs(bnk->Xwork);
323:         VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork);
324:         VecReciprocal(bnk->Xwork);
325:         VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient);
326:       } else {
327:         /* If the Hessian or its diagonal is not available, simply use the gradient step */
328:         VecCopy(bnk->unprojected_gradient, bnk->W);
329:       }
330:     }
331:     VecScale(bnk->W, -1.0);
332:     TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol, &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx);
333:     break;

335:   default:
336:     break;
337:   }
338:   bnk->resetksp = (PetscBool)(bnk->active_idx || hadactive); /* inactive Hessian size may have changed, need to reset operators */
339:   return 0;
340: }
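     /* Note: BNK_AS_NONE classifies variables directly from the current point and unprojected
        gradient via VecWhichInactive, whereas BNK_AS_BERTSEKAS estimates the active set from a
        projected trial step along a (preconditioned) gradient direction, which can also flag
        variables that are about to reach their bounds. */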

342: /*------------------------------------------------------------*/

344: /* Routine for bounding the step direction */

346: PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step)
347: {
348:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

350:   switch (asType) {
351:   case BNK_AS_NONE:
352:     VecISSet(step, bnk->active_idx, 0.0);
353:     break;

355:   case BNK_AS_BERTSEKAS:
356:     TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step);
357:     break;

359:   default:
360:     break;
361:   }
362:   return 0;
363: }

365: /*------------------------------------------------------------*/

367: /* Routine for taking a finite number of BNCG iterations to
368:    accelerate Newton convergence.

370:    In practice, this approach simply trades off Hessian evaluations
371:    for more gradient evaluations.
372: */

374: PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate)
375: {
376:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

378:   *terminate = PETSC_FALSE;
379:   if (bnk->max_cg_its > 0) {
380:     /* Copy the current function value (important vectors are already shared) */
381:     bnk->bncg_ctx->f = bnk->f;
382:     /* Take some small finite number of BNCG iterations */
383:     TaoSolve(bnk->bncg);
384:     /* Add the number of gradient and function evaluations to the total */
385:     tao->nfuncs += bnk->bncg->nfuncs;
386:     tao->nfuncgrads += bnk->bncg->nfuncgrads;
387:     tao->ngrads += bnk->bncg->ngrads;
388:     tao->nhess += bnk->bncg->nhess;
389:     bnk->tot_cg_its += bnk->bncg->niter;
390:     /* Extract the BNCG function value out and save it into BNK */
391:     bnk->f = bnk->bncg_ctx->f;
392:     if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) {
393:       *terminate = PETSC_TRUE;
394:     } else {
395:       TaoBNKEstimateActiveSet(tao, bnk->as_type);
396:     }
397:   }
398:   return 0;
399: }
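     /* Note: the embedded BNCG solver shares the solution, gradient, and step direction vectors with
        BNK (wired together in TaoSetUp_BNK), so only the objective value needs to be copied in and
        out around the TaoSolve() call above. */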

401: /*------------------------------------------------------------*/

403: /* Routine for computing the Newton step. */

405: PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type)
406: {
407:   TAO_BNK          *bnk         = (TAO_BNK *)tao->data;
408:   PetscInt          bfgsUpdates = 0;
409:   PetscInt          kspits;
410:   PetscBool         is_lmvm;
411:   PetscVoidFunction kspTR;

413:   /* If there are no inactive variables left, save some computation and return an adjusted zero step
414:      that has (l-x) and (u-x) for lower and upper bounded variables. */
415:   if (!bnk->inactive_idx) {
416:     VecSet(tao->stepdirection, 0.0);
417:     TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
418:     return 0;
419:   }

421:   /* Shift the reduced Hessian matrix */
422:   if (shift && bnk->pert > 0) {
423:     PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm);
424:     if (is_lmvm) {
425:       MatShift(tao->hessian, bnk->pert);
426:     } else {
427:       MatShift(bnk->H_inactive, bnk->pert);
428:       if (bnk->H_inactive != bnk->Hpre_inactive) MatShift(bnk->Hpre_inactive, bnk->pert);
429:     }
430:   }

432:   /* Solve the Newton system of equations */
433:   tao->ksp_its = 0;
434:   VecSet(tao->stepdirection, 0.0);
435:   if (bnk->resetksp) {
436:     KSPReset(tao->ksp);
437:     KSPResetFromOptions(tao->ksp);
438:     bnk->resetksp = PETSC_FALSE;
439:   }
440:   KSPSetOperators(tao->ksp, bnk->H_inactive, bnk->Hpre_inactive);
441:   VecCopy(bnk->unprojected_gradient, bnk->Gwork);
442:   if (bnk->active_idx) {
443:     VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
444:     VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
445:   } else {
446:     bnk->G_inactive = bnk->unprojected_gradient;
447:     bnk->X_inactive = tao->stepdirection;
448:   }
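       /* Solve H_inactive * d = g restricted to the free variables; the CG-type trust-region solvers
          honor the radius set below, and the Newton step used later is -d (the negation happens after
          the sub-vectors are restored). */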
449:   KSPCGSetRadius(tao->ksp, tao->trust);
450:   KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
451:   KSPGetIterationNumber(tao->ksp, &kspits);
452:   tao->ksp_its += kspits;
453:   tao->ksp_tot_its += kspits;
454:   PetscObjectQueryFunction((PetscObject)tao->ksp, "KSPCGGetNormD_C", &kspTR);
455:   if (kspTR) {
456:     KSPCGGetNormD(tao->ksp, &bnk->dnorm);

458:     if (0.0 == tao->trust) {
459:       /* Radius was uninitialized; use the norm of the direction */
460:       if (bnk->dnorm > 0.0) {
461:         tao->trust = bnk->dnorm;

463:         /* Modify the radius if it is too large or small */
464:         tao->trust = PetscMax(tao->trust, bnk->min_radius);
465:         tao->trust = PetscMin(tao->trust, bnk->max_radius);
466:       } else {
467:         /* The direction was bad; set radius to default value and re-solve
468:            the trust-region subproblem to get a direction */
469:         tao->trust = tao->trust0;

471:         /* Modify the radius if it is too large or small */
472:         tao->trust = PetscMax(tao->trust, bnk->min_radius);
473:         tao->trust = PetscMin(tao->trust, bnk->max_radius);

475:         KSPCGSetRadius(tao->ksp, tao->trust);
476:         KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
477:         KSPGetIterationNumber(tao->ksp, &kspits);
478:         tao->ksp_its += kspits;
479:         tao->ksp_tot_its += kspits;
480:         KSPCGGetNormD(tao->ksp, &bnk->dnorm);

483:       }
484:     }
485:   }
486:   /* Restore sub vectors back */
487:   if (bnk->active_idx) {
488:     VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
489:     VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
490:   }
491:   /* Negate the KSP solution to obtain the Newton step and zero/bound it for actively constrained variables */
492:   VecScale(tao->stepdirection, -1.0);
493:   TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);

495:   /* Record convergence reasons */
496:   KSPGetConvergedReason(tao->ksp, ksp_reason);
497:   if (KSP_CONVERGED_ATOL == *ksp_reason) {
498:     ++bnk->ksp_atol;
499:   } else if (KSP_CONVERGED_RTOL == *ksp_reason) {
500:     ++bnk->ksp_rtol;
501:   } else if (KSP_CONVERGED_CG_CONSTRAINED == *ksp_reason) {
502:     ++bnk->ksp_ctol;
503:   } else if (KSP_CONVERGED_CG_NEG_CURVE == *ksp_reason) {
504:     ++bnk->ksp_negc;
505:   } else if (KSP_DIVERGED_DTOL == *ksp_reason) {
506:     ++bnk->ksp_dtol;
507:   } else if (KSP_DIVERGED_ITS == *ksp_reason) {
508:     ++bnk->ksp_iter;
509:   } else {
510:     ++bnk->ksp_othr;
511:   }

513:   /* Make sure the BFGS preconditioner is healthy */
514:   if (bnk->M) {
515:     MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
516:     if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) {
517:       /* Preconditioner is numerically indefinite; reset the approximation. */
518:       MatLMVMReset(bnk->M, PETSC_FALSE);
519:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
520:     }
521:   }
522:   *step_type = BNK_NEWTON;
523:   return 0;
524: }

526: /*------------------------------------------------------------*/

528: /* Routine for recomputing the predicted reduction for a given step vector */

530: PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered)
531: {
532:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

534:   /* Extract subvectors associated with the inactive set */
535:   if (bnk->active_idx) {
536:     VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
537:     VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
538:     VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
539:   } else {
540:     bnk->X_inactive    = tao->stepdirection;
541:     bnk->inactive_work = bnk->Xwork;
542:     bnk->G_inactive    = bnk->Gwork;
543:   }
544:   /* Recompute the predicted decrease based on the quadratic model */
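       /* i.e. prered = g^T s - 0.5 * s^T H s over the inactive variables, with g = Gwork and
          s = tao->stepdirection */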
545:   MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
546:   VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive);
547:   VecDot(bnk->inactive_work, bnk->X_inactive, prered);
548:   /* Restore the sub vectors */
549:   if (bnk->active_idx) {
550:     VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
551:     VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
552:     VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
553:   }
554:   return 0;
555: }

557: /*------------------------------------------------------------*/

559: /* Routine for ensuring that the Newton step is a descent direction.

561:    The step direction falls back onto BFGS, scaled gradient and gradient steps
562:    in the event that the Newton step fails the test.
563: */

565: PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType)
566: {
567:   TAO_BNK  *bnk = (TAO_BNK *)tao->data;
568:   PetscReal gdx, e_min;
569:   PetscInt  bfgsUpdates;

571:   switch (*stepType) {
572:   case BNK_NEWTON:
573:     VecDot(tao->stepdirection, tao->gradient, &gdx);
574:     if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) {
575:       /* Newton step is not a descent direction or produced Inf or NaN.
576:         Update the perturbation for next time */
577:       if (bnk->pert <= 0.0) {
578:         PetscBool is_gltr;

580:         /* Initialize the perturbation */
581:         bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
582:         PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
583:         if (is_gltr) {
584:           KSPGLTRGetMinEig(tao->ksp, &e_min);
585:           bnk->pert = PetscMax(bnk->pert, -e_min);
586:         }
587:       } else {
588:         /* Increase the perturbation */
589:         bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
590:       }

592:       if (!bnk->M) {
593:         /* The BFGS matrix is not available,
594:           so we must use the gradient direction in this case */
595:         VecCopy(tao->gradient, tao->stepdirection);
596:         *stepType = BNK_GRADIENT;
597:       } else {
598:         /* Attempt to use the BFGS direction */
599:         MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

601:         /* Check for success (descent direction)
602:           NOTE: Negative gdx here means not a descent direction because
603:           the fall-back step is missing a negative sign. */
604:         VecDot(tao->gradient, tao->stepdirection, &gdx);
605:         if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
606:           /* BFGS direction is not a descent direction or produced Inf or NaN.
607:             We can assert bfgsUpdates > 1 in this case because
608:             the first solve produces the scaled gradient direction,
609:             which is guaranteed to be descent */

611:           /* Use steepest descent direction (scaled) */
612:           MatLMVMReset(bnk->M, PETSC_FALSE);
613:           MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
614:           MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

616:           *stepType = BNK_SCALED_GRADIENT;
617:         } else {
618:           MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
619:           if (1 == bfgsUpdates) {
620:             /* The first BFGS direction is always the scaled gradient */
621:             *stepType = BNK_SCALED_GRADIENT;
622:           } else {
623:             *stepType = BNK_BFGS;
624:           }
625:         }
626:       }
627:       /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
628:       VecScale(tao->stepdirection, -1.0);
629:       TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
630:     } else {
631:       /* Computed Newton step is descent */
632:       switch (ksp_reason) {
633:       case KSP_DIVERGED_NANORINF:
634:       case KSP_DIVERGED_BREAKDOWN:
635:       case KSP_DIVERGED_INDEFINITE_MAT:
636:       case KSP_DIVERGED_INDEFINITE_PC:
637:       case KSP_CONVERGED_CG_NEG_CURVE:
638:         /* Matrix or preconditioner is indefinite; increase perturbation */
639:         if (bnk->pert <= 0.0) {
640:           PetscBool is_gltr;

642:           /* Initialize the perturbation */
643:           bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
644:           PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
645:           if (is_gltr) {
646:             KSPGLTRGetMinEig(tao->ksp, &e_min);
647:             bnk->pert = PetscMax(bnk->pert, -e_min);
648:           }
649:         } else {
650:           /* Increase the perturbation */
651:           bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
652:         }
653:         break;

655:       default:
656:         /* Newton step computation is good; decrease perturbation */
657:         bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm);
658:         if (bnk->pert < bnk->pmin) bnk->pert = 0.0;
659:         break;
660:       }
661:       *stepType = BNK_NEWTON;
662:     }
663:     break;

665:   case BNK_BFGS:
666:     /* Check for success (descent direction) */
667:     VecDot(tao->stepdirection, tao->gradient, &gdx);
668:     if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) {
669:       /* Step is not a descent direction or the solve was not successful
670:          Use steepest descent direction (scaled) */
671:       MatLMVMReset(bnk->M, PETSC_FALSE);
672:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
673:       MatSolve(bnk->M, tao->gradient, tao->stepdirection);
674:       VecScale(tao->stepdirection, -1.0);
675:       TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
676:       *stepType = BNK_SCALED_GRADIENT;
677:     } else {
678:       *stepType = BNK_BFGS;
679:     }
680:     break;

682:   case BNK_SCALED_GRADIENT:
683:     break;

685:   default:
686:     break;
687:   }

689:   return 0;
690: }
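     /* Note: the perturbation bnk->pert that TaoBNKComputeStep adds to the Hessian is increased above
        whenever the Newton system appears indefinite or fails to produce descent, and is decreased
        (eventually to zero) when the Newton step computation is healthy. */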

692: /*------------------------------------------------------------*/

694: /* Routine for performing a bound-projected More-Thuente line search.

696:   Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the
697:   Newton step does not produce a valid step length.
698: */

700: PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason)
701: {
702:   TAO_BNK                     *bnk = (TAO_BNK *)tao->data;
703:   TaoLineSearchConvergedReason ls_reason;
704:   PetscReal                    e_min, gdx;
705:   PetscInt                     bfgsUpdates;

707:   /* Perform the linesearch */
708:   TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
709:   TaoAddLineSearchCounts(tao);

711:   while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) {
712:     /* Linesearch failed, revert solution */
713:     bnk->f = bnk->fold;
714:     VecCopy(bnk->Xold, tao->solution);
715:     VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);

717:     switch (*stepType) {
718:     case BNK_NEWTON:
719:       /* Failed to obtain acceptable iterate with Newton step
720:          Update the perturbation for next time */
721:       if (bnk->pert <= 0.0) {
722:         PetscBool is_gltr;

724:         /* Initialize the perturbation */
725:         bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
726:         PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
727:         if (is_gltr) {
728:           KSPGLTRGetMinEig(tao->ksp, &e_min);
729:           bnk->pert = PetscMax(bnk->pert, -e_min);
730:         }
731:       } else {
732:         /* Increase the perturbation */
733:         bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
734:       }

736:       if (!bnk->M) {
737:         /* The BFGS matrix is not available,
738:            so we must use the gradient direction in this case */
739:         VecCopy(bnk->unprojected_gradient, tao->stepdirection);
740:         *stepType = BNK_GRADIENT;
741:       } else {
742:         /* Attempt to use the BFGS direction */
743:         MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
744:         /* Check for success (descent direction)
745:            NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */
746:         VecDot(tao->gradient, tao->stepdirection, &gdx);
747:         if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
748:           /* BFGS direction is not a descent direction or produced Inf or NaN.
749:              We can assert bfgsUpdates > 1 in this case
750:              Use steepest descent direction (scaled) */
751:           MatLMVMReset(bnk->M, PETSC_FALSE);
752:           MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
753:           MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

755:           bfgsUpdates = 1;
756:           *stepType   = BNK_SCALED_GRADIENT;
757:         } else {
758:           MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
759:           if (1 == bfgsUpdates) {
760:             /* The first BFGS direction is always the scaled gradient */
761:             *stepType = BNK_SCALED_GRADIENT;
762:           } else {
763:             *stepType = BNK_BFGS;
764:           }
765:         }
766:       }
767:       break;

769:     case BNK_BFGS:
770:       /* Can only enter here if the BFGS (PCLMVM) preconditioner is in use.
771:          Failed to obtain acceptable iterate with BFGS step
772:          Attempt to use the scaled gradient direction */
773:       MatLMVMReset(bnk->M, PETSC_FALSE);
774:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
775:       MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

777:       bfgsUpdates = 1;
778:       *stepType   = BNK_SCALED_GRADIENT;
779:       break;
780:     }
781:     /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
782:     VecScale(tao->stepdirection, -1.0);
783:     TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);

785:     /* Perform one last line search with the fall-back step */
786:     TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
787:     TaoAddLineSearchCounts(tao);
788:   }
789:   *reason = ls_reason;
790:   return 0;
791: }

793: /*------------------------------------------------------------*/

795: /* Routine for updating the trust radius.

797:   Function features three different update methods:
798:   1) Line-search step length based
799:   2) Predicted decrease on the CG quadratic model
800:   3) Interpolation
801: */

803: PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept)
804: {
805:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

807:   PetscReal step, kappa;
808:   PetscReal gdx, tau_1, tau_2, tau_min, tau_max;

810:   /* Update trust region radius */
811:   *accept = PETSC_FALSE;
812:   switch (updateType) {
813:   case BNK_UPDATE_STEP:
814:     *accept = PETSC_TRUE; /* always accept here because line search succeeded */
815:     if (stepType == BNK_NEWTON) {
816:       TaoLineSearchGetStepLength(tao->linesearch, &step);
817:       if (step < bnk->nu1) {
818:         /* Very bad step taken; reduce radius */
819:         tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
820:       } else if (step < bnk->nu2) {
821:         /* Reasonably bad step taken; reduce radius */
822:         tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust);
823:       } else if (step < bnk->nu3) {
824:         /*  Reasonable step was taken; leave radius alone */
825:         if (bnk->omega3 < 1.0) {
826:           tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust);
827:         } else if (bnk->omega3 > 1.0) {
828:           tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust);
829:         }
830:       } else if (step < bnk->nu4) {
831:         /*  Full step taken; increase the radius */
832:         tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust);
833:       } else {
834:         /*  More than full step taken; increase the radius */
835:         tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust);
836:       }
837:     } else {
838:       /*  Newton step was not good; reduce the radius */
839:       tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
840:     }
841:     break;

843:   case BNK_UPDATE_REDUCTION:
844:     if (stepType == BNK_NEWTON) {
845:       if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) {
846:         /* The predicted reduction has the wrong sign.  This cannot
847:            happen in infinite precision arithmetic.  Step should
848:            be rejected! */
849:         tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
850:       } else {
851:         if (PetscIsInfOrNanReal(actred)) {
852:           tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
853:         } else {
854:           if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon)) {
855:             kappa = 1.0;
856:           } else {
857:             kappa = actred / prered;
858:           }
859:           /* Accept or reject the step and update radius */
860:           if (kappa < bnk->eta1) {
861:             /* Reject the step */
862:             tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
863:           } else {
864:             /* Accept the step */
865:             *accept = PETSC_TRUE;
866:             /* Update the trust region radius only if the computed step is at the trust radius boundary */
867:             if (bnk->dnorm == tao->trust) {
868:               if (kappa < bnk->eta2) {
869:                 /* Marginal bad step */
870:                 tao->trust = bnk->alpha2 * tao->trust;
871:               } else if (kappa < bnk->eta3) {
872:                 /* Reasonable step */
873:                 tao->trust = bnk->alpha3 * tao->trust;
874:               } else if (kappa < bnk->eta4) {
875:                 /* Good step */
876:                 tao->trust = bnk->alpha4 * tao->trust;
877:               } else {
878:                 /* Very good step */
879:                 tao->trust = bnk->alpha5 * tao->trust;
880:               }
881:             }
882:           }
883:         }
884:       }
885:     } else {
886:       /*  Newton step was not good; reduce the radius */
887:       tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust);
888:     }
889:     break;

891:   default:
892:     if (stepType == BNK_NEWTON) {
893:       if (prered < 0.0) {
894:         /*  The predicted reduction has the wrong sign.  This cannot */
895:         /*  happen in infinite precision arithmetic.  Step should */
896:         /*  be rejected! */
897:         tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
898:       } else {
899:         if (PetscIsInfOrNanReal(actred)) {
900:           tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
901:         } else {
902:           if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
903:             kappa = 1.0;
904:           } else {
905:             kappa = actred / prered;
906:           }

908:           VecDot(tao->gradient, tao->stepdirection, &gdx);
909:           tau_1   = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred);
910:           tau_2   = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred);
911:           tau_min = PetscMin(tau_1, tau_2);
912:           tau_max = PetscMax(tau_1, tau_2);

914:           if (kappa >= 1.0 - bnk->mu1) {
915:             /*  Great agreement */
916:             *accept = PETSC_TRUE;
917:             if (tau_max < 1.0) {
918:               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
919:             } else if (tau_max > bnk->gamma4) {
920:               tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm);
921:             } else {
922:               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
923:             }
924:           } else if (kappa >= 1.0 - bnk->mu2) {
925:             /*  Good agreement */
926:             *accept = PETSC_TRUE;
927:             if (tau_max < bnk->gamma2) {
928:               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
929:             } else if (tau_max > bnk->gamma3) {
930:               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
931:             } else if (tau_max < 1.0) {
932:               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
933:             } else {
934:               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
935:             }
936:           } else {
937:             /*  Not good agreement */
938:             if (tau_min > 1.0) {
939:               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
940:             } else if (tau_max < bnk->gamma1) {
941:               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
942:             } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) {
943:               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
944:             } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) {
945:               tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm);
946:             } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_2 >= 1.0))) {
947:               tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm);
948:             } else {
949:               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
950:             }
951:           }
952:         }
953:       }
954:     } else {
955:       /*  Newton step was not good; reduce the radius */
956:       tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust);
957:     }
958:     break;
959:   }
960:   /* Make sure the radius does not violate min and max settings */
961:   tao->trust = PetscMin(tao->trust, bnk->max_radius);
962:   tao->trust = PetscMax(tao->trust, bnk->min_radius);
963:   return 0;
964: }
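     /* Note: the three branches above correspond to -tao_bnk_update_type "step", "reduction", and
        "interpolation" (the default case), matching the BNK_UPDATE list at the top of this file. */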

966: /* ---------------------------------------------------------- */

968: PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType)
969: {
970:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

972:   switch (stepType) {
973:   case BNK_NEWTON:
974:     ++bnk->newt;
975:     break;
976:   case BNK_BFGS:
977:     ++bnk->bfgs;
978:     break;
979:   case BNK_SCALED_GRADIENT:
980:     ++bnk->sgrad;
981:     break;
982:   case BNK_GRADIENT:
983:     ++bnk->grad;
984:     break;
985:   default:
986:     break;
987:   }
988:   return 0;
989: }

991: /* ---------------------------------------------------------- */

993: PetscErrorCode TaoSetUp_BNK(Tao tao)
994: {
995:   TAO_BNK *bnk = (TAO_BNK *)tao->data;
996:   PetscInt i;

998:   if (!tao->gradient) VecDuplicate(tao->solution, &tao->gradient);
999:   if (!tao->stepdirection) VecDuplicate(tao->solution, &tao->stepdirection);
1000:   if (!bnk->W) VecDuplicate(tao->solution, &bnk->W);
1001:   if (!bnk->Xold) VecDuplicate(tao->solution, &bnk->Xold);
1002:   if (!bnk->Gold) VecDuplicate(tao->solution, &bnk->Gold);
1003:   if (!bnk->Xwork) VecDuplicate(tao->solution, &bnk->Xwork);
1004:   if (!bnk->Gwork) VecDuplicate(tao->solution, &bnk->Gwork);
1005:   if (!bnk->unprojected_gradient) VecDuplicate(tao->solution, &bnk->unprojected_gradient);
1006:   if (!bnk->unprojected_gradient_old) VecDuplicate(tao->solution, &bnk->unprojected_gradient_old);
1007:   if (!bnk->Diag_min) VecDuplicate(tao->solution, &bnk->Diag_min);
1008:   if (!bnk->Diag_max) VecDuplicate(tao->solution, &bnk->Diag_max);
1009:   if (bnk->max_cg_its > 0) {
1010:     /* Ensure that the important common vectors are shared between BNK and embedded BNCG */
1011:     bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data;
1012:     PetscObjectReference((PetscObject)(bnk->unprojected_gradient_old));
1013:     VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old);
1014:     bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old;
1015:     PetscObjectReference((PetscObject)(bnk->unprojected_gradient));
1016:     VecDestroy(&bnk->bncg_ctx->unprojected_gradient);
1017:     bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient;
1018:     PetscObjectReference((PetscObject)(bnk->Gold));
1019:     VecDestroy(&bnk->bncg_ctx->G_old);
1020:     bnk->bncg_ctx->G_old = bnk->Gold;
1021:     PetscObjectReference((PetscObject)(tao->gradient));
1022:     VecDestroy(&bnk->bncg->gradient);
1023:     bnk->bncg->gradient = tao->gradient;
1024:     PetscObjectReference((PetscObject)(tao->stepdirection));
1025:     VecDestroy(&bnk->bncg->stepdirection);
1026:     bnk->bncg->stepdirection = tao->stepdirection;
1027:     TaoSetSolution(bnk->bncg, tao->solution);
1028:     /* Copy over some settings from BNK into BNCG */
1029:     TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its);
1030:     TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol);
1031:     TaoSetFunctionLowerBound(bnk->bncg, tao->fmin);
1032:     TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP);
1033:     TaoSetObjective(bnk->bncg, tao->ops->computeobjective, tao->user_objP);
1034:     TaoSetGradient(bnk->bncg, NULL, tao->ops->computegradient, tao->user_gradP);
1035:     TaoSetObjectiveAndGradient(bnk->bncg, NULL, tao->ops->computeobjectiveandgradient, tao->user_objgradP);
1036:     PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)(bnk->bncg));
1037:     for (i = 0; i < tao->numbermonitors; ++i) {
1038:       TaoSetMonitor(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]);
1039:       PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
1040:     }
1041:   }
1042:   bnk->X_inactive    = NULL;
1043:   bnk->G_inactive    = NULL;
1044:   bnk->inactive_work = NULL;
1045:   bnk->active_work   = NULL;
1046:   bnk->inactive_idx  = NULL;
1047:   bnk->active_idx    = NULL;
1048:   bnk->active_lower  = NULL;
1049:   bnk->active_upper  = NULL;
1050:   bnk->active_fixed  = NULL;
1051:   bnk->M             = NULL;
1052:   bnk->H_inactive    = NULL;
1053:   bnk->Hpre_inactive = NULL;
1054:   return 0;
1055: }

1057: /*------------------------------------------------------------*/

1059: PetscErrorCode TaoDestroy_BNK(Tao tao)
1060: {
1061:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

1063:   VecDestroy(&bnk->W);
1064:   VecDestroy(&bnk->Xold);
1065:   VecDestroy(&bnk->Gold);
1066:   VecDestroy(&bnk->Xwork);
1067:   VecDestroy(&bnk->Gwork);
1068:   VecDestroy(&bnk->unprojected_gradient);
1069:   VecDestroy(&bnk->unprojected_gradient_old);
1070:   VecDestroy(&bnk->Diag_min);
1071:   VecDestroy(&bnk->Diag_max);
1072:   ISDestroy(&bnk->active_lower);
1073:   ISDestroy(&bnk->active_upper);
1074:   ISDestroy(&bnk->active_fixed);
1075:   ISDestroy(&bnk->active_idx);
1076:   ISDestroy(&bnk->inactive_idx);
1077:   MatDestroy(&bnk->Hpre_inactive);
1078:   MatDestroy(&bnk->H_inactive);
1079:   TaoDestroy(&bnk->bncg);
1080:   KSPDestroy(&tao->ksp);
1081:   PetscFree(tao->data);
1082:   return 0;
1083: }

1085: /*------------------------------------------------------------*/

1087: PetscErrorCode TaoSetFromOptions_BNK(Tao tao, PetscOptionItems *PetscOptionsObject)
1088: {
1089:   TAO_BNK *bnk = (TAO_BNK *)tao->data;

1091:   PetscOptionsHeadBegin(PetscOptionsObject, "Newton-Krylov method for bound constrained optimization");
1092:   PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, NULL);
1093:   PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, NULL);
1094:   PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, NULL);
1095:   PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval, NULL);
1096:   PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin, NULL);
1097:   PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial Hessian perturbation", "", bnk->imax, &bnk->imax, NULL);
1098:   PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac, NULL);
1099:   PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin, NULL);
1100:   PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax, NULL);
1101:   PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac, NULL);
1102:   PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac, NULL);
1103:   PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac, NULL);
1104:   PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac, NULL);
1105:   PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1, NULL);
1106:   PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2, NULL);
1107:   PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3, NULL);
1108:   PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4, NULL);
1109:   PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, &bnk->alpha1, NULL);
1110:   PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2, NULL);
1111:   PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3, NULL);
1112:   PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4, NULL);
1113:   PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5, NULL);
1114:   PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1, NULL);
1115:   PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2, NULL);
1116:   PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3, NULL);
1117:   PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4, NULL);
1118:   PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1, NULL);
1119:   PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2, NULL);
1120:   PetscOptionsReal("-tao_bnk_omega3", "(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3, NULL);
1121:   PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4, NULL);
1122:   PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5, NULL);
1123:   PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i, NULL);
1124:   PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i, NULL);
1125:   PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i, NULL);
1126:   PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i, NULL);
1127:   PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i, NULL);
1128:   PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i, NULL);
1129:   PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i, NULL);
1130:   PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1, NULL);
1131:   PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2, NULL);
1132:   PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1, NULL);
1133:   PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2, NULL);
1134:   PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3, NULL);
1135:   PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4, NULL);
1136:   PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta, NULL);
1137:   PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius, NULL);
1138:   PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius, NULL);
1139:   PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon, NULL);
1140:   PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol, NULL);
1141:   PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step, NULL);
1142:   PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its, NULL);
1143:   PetscOptionsHeadEnd();

1145:   TaoSetOptionsPrefix(bnk->bncg, ((PetscObject)(tao))->prefix);
1146:   TaoAppendOptionsPrefix(bnk->bncg, "tao_bnk_cg_");
1147:   TaoSetFromOptions(bnk->bncg);

1149:   KSPSetOptionsPrefix(tao->ksp, ((PetscObject)(tao))->prefix);
1150:   KSPAppendOptionsPrefix(tao->ksp, "tao_bnk_");
1151:   KSPSetFromOptions(tao->ksp);
1152:   return 0;
1153: }
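      /* Illustrative usage (not part of the algorithm itself): because of the prefixes set above, the
         inner KSP and its preconditioner can be controlled with options such as
             -tao_bnk_ksp_type stcg -tao_bnk_pc_type lmvm
         while the embedded BNCG solver is configured through the "tao_bnk_cg_" prefix. */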

1155: /*------------------------------------------------------------*/

1157: PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer)
1158: {
1159:   TAO_BNK  *bnk = (TAO_BNK *)tao->data;
1160:   PetscInt  nrejects;
1161:   PetscBool isascii;

1163:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
1164:   if (isascii) {
1165:     PetscViewerASCIIPushTab(viewer);
1166:     TaoView(bnk->bncg, viewer);
1167:     if (bnk->M) {
1168:       MatLMVMGetRejectCount(bnk->M, &nrejects);
1169:       PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %" PetscInt_FMT "\n", nrejects);
1170:     }
1171:     PetscViewerASCIIPrintf(viewer, "CG steps: %" PetscInt_FMT "\n", bnk->tot_cg_its);
1172:     PetscViewerASCIIPrintf(viewer, "Newton steps: %" PetscInt_FMT "\n", bnk->newt);
1173:     if (bnk->M) PetscViewerASCIIPrintf(viewer, "BFGS steps: %" PetscInt_FMT "\n", bnk->bfgs);
1174:     PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %" PetscInt_FMT "\n", bnk->sgrad);
1175:     PetscViewerASCIIPrintf(viewer, "Gradient steps: %" PetscInt_FMT "\n", bnk->grad);
1176:     PetscViewerASCIIPrintf(viewer, "KSP termination reasons:\n");
1177:     PetscViewerASCIIPrintf(viewer, "  atol: %" PetscInt_FMT "\n", bnk->ksp_atol);
1178:     PetscViewerASCIIPrintf(viewer, "  rtol: %" PetscInt_FMT "\n", bnk->ksp_rtol);
1179:     PetscViewerASCIIPrintf(viewer, "  ctol: %" PetscInt_FMT "\n", bnk->ksp_ctol);
1180:     PetscViewerASCIIPrintf(viewer, "  negc: %" PetscInt_FMT "\n", bnk->ksp_negc);
1181:     PetscViewerASCIIPrintf(viewer, "  dtol: %" PetscInt_FMT "\n", bnk->ksp_dtol);
1182:     PetscViewerASCIIPrintf(viewer, "  iter: %" PetscInt_FMT "\n", bnk->ksp_iter);
1183:     PetscViewerASCIIPrintf(viewer, "  othr: %" PetscInt_FMT "\n", bnk->ksp_othr);
1184:     PetscViewerASCIIPopTab(viewer);
1185:   }
1186:   return 0;
1187: }
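/*
   A minimal sketch of how this viewer output is typically produced: either by running the
   driver with the standard -tao_view option after TaoSolve() completes, or by calling
   TaoView() explicitly. The counters reported here (Newton/BFGS/gradient steps and the KSP
   termination tallies) are accumulated over the whole solve.

     TaoSolve(tao);
     TaoView(tao, PETSC_VIEWER_STDOUT_WORLD);
*/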

1189: /* ---------------------------------------------------------- */

1191: /*MC
1192:   TAOBNK - Shared base type for Bounded Newton-Krylov (BNK) algorithms.
1193:   At each iteration, the BNK methods solve the symmetric
1194:   system of equations to obtain the step direction dk:
1195:               Hk dk = -gk
1196:   restricted to the free (inactive) variables only. The step can be globalized
1197:   through a trust-region method, a line search, or a heuristic mixture of both.

1199:     Options Database Keys:
1200: + -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
1201: . -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
1202: . -tao_bnk_update_type - trust radius update method ("step", "reduction", "interpolation")
1203: . -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")
1204: . -tao_bnk_as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas)
1205: . -tao_bnk_as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas)
1206: . -tao_bnk_sval - (developer) Hessian perturbation starting value
1207: . -tao_bnk_imin - (developer) minimum initial Hessian perturbation
1208: . -tao_bnk_imax - (developer) maximum initial Hessian perturbation
1209: . -tao_bnk_pmin - (developer) minimum Hessian perturbation
1210: . -tao_bnk_pmax - (developer) maximum Hessian perturbation
1211: . -tao_bnk_pgfac - (developer) Hessian perturbation growth factor
1212: . -tao_bnk_psfac - (developer) Hessian perturbation shrink factor
1213: . -tao_bnk_imfac - (developer) initial merit factor for Hessian perturbation
1214: . -tao_bnk_pmgfac - (developer) merit growth factor for Hessian perturbation
1215: . -tao_bnk_pmsfac - (developer) merit shrink factor for Hessian perturbation
1216: . -tao_bnk_eta1 - (developer) threshold for rejecting step (-update_type reduction)
1217: . -tao_bnk_eta2 - (developer) threshold for accepting marginal step (-update_type reduction)
1218: . -tao_bnk_eta3 - (developer) threshold for accepting reasonable step (-update_type reduction)
1219: . -tao_bnk_eta4 - (developer) threshold for accepting good step (-update_type reduction)
1220: . -tao_bnk_alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction)
1221: . -tao_bnk_alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction)
1222: . -tao_bnk_alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction)
1223: . -tao_bnk_alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction)
1224: . -tao_bnk_alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction)
1225: . -tao_bnk_epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction)
1226: . -tao_bnk_mu1 - (developer) threshold for accepting very good step (-update_type interpolation)
1227: . -tao_bnk_mu2 - (developer) threshold for accepting good step (-update_type interpolation)
1228: . -tao_bnk_gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation)
1229: . -tao_bnk_gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation)
1230: . -tao_bnk_gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation)
1231: . -tao_bnk_gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation)
1232: . -tao_bnk_theta - (developer) trust region interpolation factor (-update_type interpolation)
1233: . -tao_bnk_nu1 - (developer) threshold for small line-search step length (-update_type step)
1234: . -tao_bnk_nu2 - (developer) threshold for reasonable line-search step length (-update_type step)
1235: . -tao_bnk_nu3 - (developer) threshold for large line-search step length (-update_type step)
1236: . -tao_bnk_nu4 - (developer) threshold for very large line-search step length (-update_type step)
1237: . -tao_bnk_omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step)
1238: . -tao_bnk_omega2 - (developer) radius reduction factor for small line-search step length (-update_type step)
1239: . -tao_bnk_omega3 - (developer) radius factor for decent line-search step length (-update_type step)
1240: . -tao_bnk_omega4 - (developer) radius increase factor for large line-search step length (-update_type step)
1241: . -tao_bnk_omega5 - (developer) radius increase factor for very large line-search step length (-update_type step)
1242: . -tao_bnk_mu1_i - (developer) threshold for accepting very good step (-init_type interpolation)
1243: . -tao_bnk_mu2_i - (developer) threshold for accepting good step (-init_type interpolation)
1244: . -tao_bnk_gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation)
1245: . -tao_bnk_gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation)
1246: . -tao_bnk_gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation)
1247: . -tao_bnk_gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
1248: - -tao_bnk_theta_i - (developer) trust region interpolation factor (-init_type interpolation)

1250:   Level: beginner
1251: M*/
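
/*
   A minimal driver sketch for the options documented above. FormFunctionGradient(), FormHessian(),
   the context `user`, the Vecs x, xl, xu, and the Mat H are assumed to be user-provided; TAOBNTR is
   used as one of the concrete solvers built on this BNK base type (TAOBNLS and TAOBNTL are the others):

     Tao tao;
     TaoCreate(PETSC_COMM_WORLD, &tao);
     TaoSetType(tao, TAOBNTR);                                         // Newton trust-region member of the BNK family
     TaoSetSolution(tao, x);                                           // initial guess
     TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, &user);
     TaoSetHessian(tao, H, H, FormHessian, &user);
     TaoSetVariableBounds(tao, xl, xu);                                // lower/upper bound Vecs
     TaoSetFromOptions(tao);                                           // picks up the -tao_bnk_* options listed above
     TaoSolve(tao);
     TaoDestroy(&tao);
*/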

1253: PetscErrorCode TaoCreate_BNK(Tao tao)
1254: {
1255:   TAO_BNK *bnk;
1256:   PC       pc;

1258:   PetscNew(&bnk);

1260:   tao->ops->setup          = TaoSetUp_BNK;
1261:   tao->ops->view           = TaoView_BNK;
1262:   tao->ops->setfromoptions = TaoSetFromOptions_BNK;
1263:   tao->ops->destroy        = TaoDestroy_BNK;

1265:   /*  Override default settings (unless already changed) */
1266:   if (!tao->max_it_changed) tao->max_it = 50;
1267:   if (!tao->trust0_changed) tao->trust0 = 100.0;

1269:   tao->data = (void *)bnk;

1271:   /*  Hessian/step evaluation callbacks and Hessian shifting parameters */
1272:   bnk->computehessian = TaoBNKComputeHessian;
1273:   bnk->computestep    = TaoBNKComputeStep;

1275:   bnk->sval  = 0.0;
1276:   bnk->imin  = 1.0e-4;
1277:   bnk->imax  = 1.0e+2;
1278:   bnk->imfac = 1.0e-1;

1280:   bnk->pmin   = 1.0e-12;
1281:   bnk->pmax   = 1.0e+2;
1282:   bnk->pgfac  = 1.0e+1;
1283:   bnk->psfac  = 4.0e-1;
1284:   bnk->pmgfac = 1.0e-1;
1285:   bnk->pmsfac = 1.0e-1;

1287:   /*  Default values for trust-region radius update based on steplength */
1288:   bnk->nu1 = 0.25;
1289:   bnk->nu2 = 0.50;
1290:   bnk->nu3 = 1.00;
1291:   bnk->nu4 = 1.25;

1293:   bnk->omega1 = 0.25;
1294:   bnk->omega2 = 0.50;
1295:   bnk->omega3 = 1.00;
1296:   bnk->omega4 = 2.00;
1297:   bnk->omega5 = 4.00;

1299:   /*  Default values for trust-region radius update based on reduction */
1300:   bnk->eta1 = 1.0e-4;
1301:   bnk->eta2 = 0.25;
1302:   bnk->eta3 = 0.50;
1303:   bnk->eta4 = 0.90;

1305:   bnk->alpha1 = 0.25;
1306:   bnk->alpha2 = 0.50;
1307:   bnk->alpha3 = 1.00;
1308:   bnk->alpha4 = 2.00;
1309:   bnk->alpha5 = 4.00;

1311:   /*  Default values for trust-region radius update based on interpolation */
1312:   bnk->mu1 = 0.10;
1313:   bnk->mu2 = 0.50;

1315:   bnk->gamma1 = 0.25;
1316:   bnk->gamma2 = 0.50;
1317:   bnk->gamma3 = 2.00;
1318:   bnk->gamma4 = 4.00;

1320:   bnk->theta = 0.05;

1322:   /*  Default values for trust region initialization based on interpolation */
1323:   bnk->mu1_i = 0.35;
1324:   bnk->mu2_i = 0.50;

1326:   bnk->gamma1_i = 0.0625;
1327:   bnk->gamma2_i = 0.5;
1328:   bnk->gamma3_i = 2.0;
1329:   bnk->gamma4_i = 5.0;

1331:   bnk->theta_i = 0.25;

1333:   /*  Remaining parameters */
1334:   bnk->max_cg_its = 0;
1335:   bnk->min_radius = 1.0e-10;
1336:   bnk->max_radius = 1.0e10;
1337:   bnk->epsilon    = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0 / 3.0);
1338:   bnk->as_tol     = 1.0e-3;
1339:   bnk->as_step    = 1.0e-3;
1340:   bnk->dmin       = 1.0e-6;
1341:   bnk->dmax       = 1.0e6;

1343:   bnk->M           = NULL;
1344:   bnk->bfgs_pre    = NULL;
1345:   bnk->init_type   = BNK_INIT_INTERPOLATION;
1346:   bnk->update_type = BNK_UPDATE_REDUCTION;
1347:   bnk->as_type     = BNK_AS_BERTSEKAS;

1349:   /* Create the embedded BNCG solver */
1350:   TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg);
1351:   PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1);
1352:   TaoSetType(bnk->bncg, TAOBNCG);

1354:   /* Create the line search */
1355:   TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);
1356:   PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);
1357:   TaoLineSearchSetType(tao->linesearch, TAOLINESEARCHMT);
1358:   TaoLineSearchUseTaoRoutines(tao->linesearch, tao);

1360:   /*  Set linear solver to default for symmetric matrices */
1361:   KSPCreate(((PetscObject)tao)->comm, &tao->ksp);
1362:   PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);
1363:   KSPSetType(tao->ksp, KSPSTCG);
1364:   KSPGetPC(tao->ksp, &pc);
1365:   PCSetType(pc, PCLMVM);
1366:   return 0;
1367: }
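
/*
   The defaults installed above (a KSPSTCG linear solver with a PCLMVM preconditioner, max_it = 50,
   trust0 = 100.0) are only starting points. A sketch of overriding them programmatically, assuming
   `tao` was created with one of the BNK-family types:

     KSP ksp;
     PC  pc;
     TaoSetMaximumIterations(tao, 200);            // replaces the max_it = 50 default
     TaoSetInitialTrustRegionRadius(tao, 10.0);    // replaces the trust0 = 100.0 default
     TaoGetKSP(tao, &ksp);                         // the KSP created in TaoCreate_BNK()
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCNONE);                        // drop the LMVM preconditioner, e.g. for debugging
*/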