Actual source code: taosolver_fg.c
#include <petsc/private/taoimpl.h>

/*@
   TaoSetSolution - Sets the vector holding the initial guess for the solve

   Logically collective

   Input Parameters:
+  tao - the Tao context
-  x0  - the initial guess

   Level: beginner

.seealso: `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  return 0;
}
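
/*
   A minimal usage sketch (assumes a Tao object `tao` created with TaoCreate()
   and a global problem size `n`; all names are illustrative):

     Vec x0;
     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, n, &x0));
     PetscCall(VecSet(x0, 1.0));          // start the solve from the all-ones vector
     PetscCall(TaoSetSolution(tao, x0));  // Tao takes its own reference to x0
     PetscCall(VecDestroy(&x0));          // safe: Tao still holds a reference
     PetscCall(TaoSolve(tao));
*/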

PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
{
  Vec               g2, g3;
  PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
  PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer, mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;

  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
  PetscOptionsEnd();
  if (!test) {
    if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
    return 0;
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));

  PetscCall(VecDuplicate(x, &g2));
  PetscCall(VecDuplicate(x, &g3));

  /* Compute the finite difference gradient; the hand-coded gradient is assumed to already be in g1, computed by TaoComputeGradient() */
  PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));

  PetscCall(VecNorm(g2, NORM_2, &fdnorm));
  PetscCall(VecNorm(g1, NORM_2, &hcnorm));
  PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
  PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
  PetscCall(VecDot(g1, g2, &dot));
  PetscCall(VecCopy(g1, g3));
  PetscCall(VecAXPY(g3, -1.0, g2));
  PetscCall(VecNorm(g3, NORM_2, &diffnorm));
  PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Finite difference gradient ----------\n"));
    PetscCall(VecView(g2, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3, mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  PetscCall(PetscViewerASCIISetTab(viewer, tabs));
  return 0;
}
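
/*
   A typical way to exercise this test from the command line, using the two
   options registered above (the executable name is illustrative):

     ./myapp -tao_test_gradient -tao_test_gradient_view

   The first option triggers the norm comparison printed above; the second
   additionally views the hand-coded, finite difference, and difference vectors.
*/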

/*@
   TaoComputeGradient - Computes the gradient of the objective function

   Collective

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameter:
.  G - gradient vector

   Options Database Keys:
+  -tao_test_gradient - compare the user-provided gradient with one computed via finite differences to check for errors
-  -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users locate errors in the user-provided gradient

   Note:
   `TaoComputeGradient()` is typically used within the implementation of the optimization method,
   so most users would not generally call this routine themselves.

   Level: developer

.seealso: `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  return 0;
}
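
/*
   A minimal sketch of how a method implementation might use this routine
   (assumes a set-up `tao` with solution vector `x` and work vector `g`;
   names are illustrative):

     PetscReal gnorm;
     PetscCall(TaoComputeGradient(tao, x, g));  // fills g; runs -tao_test_gradient if requested
     PetscCall(VecNorm(g, NORM_2, &gnorm));     // e.g. for a first-order convergence test
*/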

/*@
   TaoComputeObjective - Computes the objective function value at a given point

   Collective

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameter:
.  f - Objective value at X

   Note:
   `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm,
   so most users would not generally call this routine themselves.

   Level: developer

.seealso: `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X, &temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));
  return 0;
}

/*@
   TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

   Collective

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameters:
+  f - Objective value at X
-  g - Gradient vector at X

   Note:
   `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
   so most users would not generally call this routine themselves.

   Level: developer

.seealso: `TaoComputeObjective()`, `TaoComputeGradient()`, `TaoSetObjective()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      PetscCall(TaoComputeObjective(tao, X, f));
      PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
    } else PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  return 0;
}

/*@C
   TaoSetObjective - Sets the function evaluation routine for minimization

   Logically collective

   Input Parameters:
+  tao - the Tao context
.  func - the objective function
-  ctx - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

   Calling sequence of func:
$  func(Tao tao, Vec x, PetscReal *f, void *ctx);
+  x - input vector
.  f - function value
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
@*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, void *), void *ctx)
{
  if (ctx) tao->user_objP = ctx;
  if (func) tao->ops->computeobjective = func;
  return 0;
}
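
/*
   A minimal sketch of a user objective callback and its registration, following
   the calling sequence above (the function name, context type, and the problem
   f(x) = (scale/2) ||x||^2 are illustrative):

     typedef struct { PetscReal scale; } MyCtx;

     static PetscErrorCode MyObjective(Tao tao, Vec x, PetscReal *f, void *ctx)
     {
       MyCtx    *user = (MyCtx *)ctx;
       PetscReal nrm;

       PetscCall(VecNorm(x, NORM_2, &nrm));
       *f = 0.5 * user->scale * nrm * nrm;  // f(x) = (scale/2) ||x||^2
       return 0;
     }

     // during setup:
     MyCtx user = {1.0};
     PetscCall(TaoSetObjective(tao, MyObjective, &user));
*/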

/*@C
   TaoGetObjective - Gets the function evaluation routine for the function to be minimized

   Not collective

   Input Parameter:
.  tao - the Tao context

   Output Parameters:
+  func - the objective function
-  ctx - the user-defined context for private data for the function evaluation

   Calling sequence of func:
$  func(Tao tao, Vec x, PetscReal *f, void *ctx);
+  x - input vector
.  f - function value
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal *, void *), void **ctx)
{
  if (func) *func = tao->ops->computeobjective;
  if (ctx) *ctx = tao->user_objP;
  return 0;
}

/*@C
   TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

   Logically collective

   Input Parameters:
+  tao - the Tao context
.  res - the vector to internally hold the residual computation
.  func - the residual evaluation routine
-  ctx - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

   Calling sequence of func:
$  func(Tao tao, Vec x, Vec f, void *ctx);
+  x - input vector
.  f - residual vector
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscCall(PetscObjectReference((PetscObject)res));
  if (tao->ls_res) PetscCall(VecDestroy(&tao->ls_res));
  tao->ls_res               = res;
  tao->user_lsresP          = ctx;
  tao->ops->computeresidual = func;

  return 0;
}
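
/*
   A minimal sketch of a least-squares residual callback and its registration.
   The residual r_i(x) = x_i - d_i against illustrative data `d`, and all names,
   are assumptions for the example:

     static PetscErrorCode MyResidual(Tao tao, Vec x, Vec r, void *ctx)
     {
       Vec d = (Vec)ctx;                // observed data passed as the context

       PetscCall(VecCopy(x, r));
       PetscCall(VecAXPY(r, -1.0, d));  // r = x - d
       return 0;
     }

     // during setup, with a residual work vector `res` and data vector `data`:
     PetscCall(TaoSetResidualRoutine(tao, res, MyResidual, (void *)data));
*/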

/*@
   TaoSetResidualWeights - Gives weights for the residual values. A vector can be used if
   only diagonal terms are needed; otherwise the weight matrix can be given in coordinate
   form via the (rows, cols, vals) arrays. If this function is not called, or if both
   sigma_v and vals are NULL, the identity matrix is used for the weights.

   Collective

   Input Parameters:
+  tao - the Tao context
.  sigma_v - vector of weights (diagonal terms only)
.  n - the number of weights (if using off-diagonal terms)
.  rows - index list of rows for the weight matrix
.  cols - index list of columns for the weight matrix
-  vals - array of weights

   Note:
   Either sigma_v or vals (or both) should be NULL

   Level: intermediate

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  PetscCall(PetscObjectReference((PetscObject)sigma_v));
  PetscCall(VecDestroy(&tao->res_weights_v));
  tao->res_weights_v = sigma_v;
  if (vals) {
    PetscCall(PetscFree(tao->res_weights_rows));
    PetscCall(PetscFree(tao->res_weights_cols));
    PetscCall(PetscFree(tao->res_weights_w));
    PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
    PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
    PetscCall(PetscMalloc1(n, &tao->res_weights_w));
    tao->res_weights_n = n;
    for (i = 0; i < n; i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
  }
  return 0;
}
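
/*
   A minimal sketch of setting diagonal weights only (assumes `res` is the
   residual vector passed to TaoSetResidualRoutine(); the weight value is
   illustrative):

     Vec w;
     PetscCall(VecDuplicate(res, &w));
     PetscCall(VecSet(w, 2.0));  // weight every residual entry by 2
     PetscCall(TaoSetResidualWeights(tao, w, 0, NULL, NULL, NULL));
     PetscCall(VecDestroy(&w));  // safe: Tao keeps its own reference
*/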

/*@
   TaoComputeResidual - Computes a least-squares residual vector at a given point

   Collective

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameter:
.  F - residual vector at X

   Note:
   `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  if (tao->ops->computeresidual) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  return 0;
}

/*@C
   TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

   Logically collective

   Input Parameters:
+  tao - the Tao context
.  g - [optional] the vector to internally hold the gradient computation
.  func - the gradient function
-  ctx - [optional] user-defined context for private data for the gradient evaluation
         routine (may be NULL)

   Calling sequence of func:
$  func(Tao tao, Vec x, Vec g, void *ctx);
+  x - input vector
.  g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
@*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  if (g) {
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (func) tao->ops->computegradient = func;
  if (ctx) tao->user_gradP = ctx;
  return 0;
}
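
/*
   A minimal sketch of a gradient callback matching the objective
   f(x) = 0.5 ||x||^2 from the earlier sketch (names are illustrative;
   passing NULL for the gradient vector leaves Tao to create one during setup):

     static PetscErrorCode MyGradient(Tao tao, Vec x, Vec g, void *ctx)
     {
       PetscCall(VecCopy(x, g));  // grad f(x) = x for f(x) = 0.5 ||x||^2
       return 0;
     }

     // during setup:
     PetscCall(TaoSetGradient(tao, NULL, MyGradient, NULL));
*/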

/*@C
   TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

   Not collective

   Input Parameter:
.  tao - the Tao context

   Output Parameters:
+  g - the vector to internally hold the gradient computation
.  func - the gradient function
-  ctx - user-defined context for private data for the gradient evaluation routine

   Calling sequence of func:
$  func(Tao tao, Vec x, Vec g, void *ctx);
+  x - input vector
.  g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void *), void **ctx)
{
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computegradient;
  if (ctx) *ctx = tao->user_gradP;
  return 0;
}

/*@C
   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

   Logically collective

   Input Parameters:
+  tao - the Tao context
.  g - [optional] the vector to internally hold the gradient computation
.  func - the objective and gradient evaluation routine
-  ctx - [optional] user-defined context for private data for the evaluation
         routine (may be NULL)

   Calling sequence of func:
$  func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
+  x - input vector
.  f - objective value (output)
.  g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

   Note:
   For some optimization methods using a combined routine can be more efficient,
   since the objective and gradient often share intermediate computations.

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  if (g) {
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (ctx) tao->user_objgradP = ctx;
  if (func) tao->ops->computeobjectiveandgradient = func;
  return 0;
}
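
/*
   A minimal sketch of a combined callback for f(x) = 0.5 ||x||^2, reusing one
   intermediate computation for both outputs (names are illustrative):

     static PetscErrorCode MyObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
     {
       PetscScalar dot;

       PetscCall(VecCopy(x, g));       // grad f(x) = x
       PetscCall(VecDot(x, x, &dot));  // shared work: x'x also gives the objective
       *f = 0.5 * PetscRealPart(dot);
       return 0;
     }

     // during setup:
     PetscCall(TaoSetObjectiveAndGradient(tao, NULL, MyObjGrad, NULL));
*/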

/*@C
   TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

   Not collective

   Input Parameter:
.  tao - the Tao context

   Output Parameters:
+  g - the vector to internally hold the gradient computation
.  func - the objective and gradient evaluation routine
-  ctx - user-defined context for private data for the evaluation routine

   Calling sequence of func:
$  func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
+  x - input vector
.  f - objective value (output)
.  g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
@*/
PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal *, Vec, void *), void **ctx)
{
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computeobjectiveandgradient;
  if (ctx) *ctx = tao->user_objgradP;
  return 0;
}

/*@
   TaoIsObjectiveDefined - Checks to see if the user has
   declared an objective-only routine. Useful for determining when
   it is appropriate to call `TaoComputeObjective()` or
   `TaoComputeObjectiveAndGradient()`

   Not collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise

   Level: developer

.seealso: `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return 0;
}

/*@
   TaoIsGradientDefined - Checks to see if the user has
   declared a gradient routine. Useful for determining when
   it is appropriate to call `TaoComputeGradient()` or
   `TaoComputeObjectiveAndGradient()`

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise

   Level: developer

.seealso: `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return 0;
}

/*@
   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
   declared a joint objective/gradient routine. Useful for determining when
   it is appropriate to call `TaoComputeObjective()` or
   `TaoComputeObjectiveAndGradient()`

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise

   Level: developer

.seealso: `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return 0;
}
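
/*
   A minimal sketch of how a solver implementation might use these queries to
   pick the cheapest evaluation path (illustrative logic only; assumes set-up
   vectors `x` and `g`):

     PetscBool has_objgrad;
     PetscReal f;

     PetscCall(TaoIsObjectiveAndGradientDefined(tao, &has_objgrad));
     if (has_objgrad) {
       PetscCall(TaoComputeObjectiveAndGradient(tao, x, &f, g));  // one combined callback
     } else {
       PetscCall(TaoComputeObjective(tao, x, &f));                // fall back to separate calls
       PetscCall(TaoComputeGradient(tao, x, g));
     }
*/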