Actual source code: taosolver_fg.c

  1: #include <petsc/private/taoimpl.h>

  3: /*@
  4:   TaoSetSolution - Sets the vector holding the initial guess for the solve

  6:   Logically Collective

  8:   Input Parameters:
  9: + tao - the `Tao` context
 10: - x0  - the initial guess

 12:   Level: beginner
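
  Example usage, a minimal sketch (it assumes `x0` is a vector the caller has already created
  with the layout of the optimization variables, for instance via `DMCreateGlobalVector()`):
.vb
  PetscCall(VecSet(x0, 1.0));         // fill the initial guess
  PetscCall(TaoSetSolution(tao, x0)); // Tao takes its own reference to x0
  PetscCall(VecDestroy(&x0));         // safe, the reference held by Tao keeps the vector alive
.ve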

 14: .seealso: [](ch_tao), `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
 15: @*/
 16: PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
 17: {
 18:   PetscFunctionBegin;
 21:   PetscCall(PetscObjectReference((PetscObject)x0));
 22:   PetscCall(VecDestroy(&tao->solution));
 23:   tao->solution = x0;
 24:   PetscFunctionReturn(PETSC_SUCCESS);
 25: }

 27: PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
 28: {
 29:   Vec               g2, g3;
 30:   PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
 31:   PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
 32:   PetscScalar       dot;
 33:   MPI_Comm          comm;
 34:   PetscViewer       viewer, mviewer;
 35:   PetscViewerFormat format;
 36:   PetscInt          tabs;
 37:   static PetscBool  directionsprinted = PETSC_FALSE;

 39:   PetscFunctionBegin;
 40:   PetscObjectOptionsBegin((PetscObject)tao);
 41:   PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
 42:   PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
 43:   PetscOptionsEnd();
 44:   if (!test) {
 45:     if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
 46:     PetscFunctionReturn(PETSC_SUCCESS);
 47:   }

 49:   PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
 50:   PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
 51:   PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
 52:   PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
 53:   PetscCall(PetscViewerASCIIPrintf(viewer, "  ---------- Testing Gradient -------------\n"));
 54:   if (!complete_print && !directionsprinted) {
 55:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
 56:     PetscCall(PetscViewerASCIIPrintf(viewer, "    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
 57:   }
 58:   if (!directionsprinted) {
 59:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
 60:     PetscCall(PetscViewerASCIIPrintf(viewer, "    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
 61:     directionsprinted = PETSC_TRUE;
 62:   }
 63:   if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));

 65:   PetscCall(VecDuplicate(x, &g2));
 66:   PetscCall(VecDuplicate(x, &g3));

 68:   /* Compute the finite difference gradient; the hand-coded gradient is assumed to have already been computed by TaoComputeGradient() and placed in g1 */
 69:   PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));

 71:   PetscCall(VecNorm(g2, NORM_2, &fdnorm));
 72:   PetscCall(VecNorm(g1, NORM_2, &hcnorm));
 73:   PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
 74:   PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
 75:   PetscCall(VecDot(g1, g2, &dot));
 76:   PetscCall(VecCopy(g1, g3));
 77:   PetscCall(VecAXPY(g3, -1.0, g2));
 78:   PetscCall(VecNorm(g3, NORM_2, &diffnorm));
 79:   PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
 80:   PetscCall(PetscViewerASCIIPrintf(viewer, "  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
 81:   PetscCall(PetscViewerASCIIPrintf(viewer, "  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
 82:   PetscCall(PetscViewerASCIIPrintf(viewer, "  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));

 84:   if (complete_print) {
 85:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded gradient ----------\n"));
 86:     PetscCall(VecView(g1, mviewer));
 87:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Finite difference gradient ----------\n"));
 88:     PetscCall(VecView(g2, mviewer));
 89:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded minus finite-difference gradient ----------\n"));
 90:     PetscCall(VecView(g3, mviewer));
 91:   }
 92:   PetscCall(VecDestroy(&g2));
 93:   PetscCall(VecDestroy(&g3));

 95:   if (complete_print) {
 96:     PetscCall(PetscViewerPopFormat(mviewer));
 97:     PetscCall(PetscViewerDestroy(&mviewer));
 98:   }
 99:   PetscCall(PetscViewerASCIISetTab(viewer, tabs));
100:   PetscFunctionReturn(PETSC_SUCCESS);
101: }

103: /*@
104:   TaoComputeGradient - Computes the gradient of the objective function

106:   Collective

108:   Input Parameters:
109: + tao - the `Tao` context
110: - X   - input vector

112:   Output Parameter:
113: . G - gradient vector

115:   Options Database Keys:
116: + -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
117: - -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users locate errors in the user-provided gradient
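
  For example, the hand-coded gradient of an application can be checked at each gradient
  evaluation from the command line (the program name `./myprog` is illustrative):
.vb
  ./myprog -tao_test_gradient -tao_test_gradient_view
.ve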

119:   Level: developer

121:   Note:
122:   `TaoComputeGradient()` is typically used within the implementation of the optimization method,
123:   so most users would not generally call this routine themselves.

125: .seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
126: @*/
127: PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
128: {
129:   PetscReal dummy;

131:   PetscFunctionBegin;
135:   PetscCheckSameComm(tao, 1, X, 2);
136:   PetscCheckSameComm(tao, 1, G, 3);
137:   PetscCall(VecLockReadPush(X));
138:   if (tao->ops->computegradient) {
139:     PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
140:     PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
141:     PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
142:     tao->ngrads++;
143:   } else if (tao->ops->computeobjectiveandgradient) {
144:     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
145:     PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
146:     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
147:     tao->nfuncgrads++;
148:   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
149:   PetscCall(VecLockReadPop(X));

151:   PetscCall(TaoTestGradient(tao, X, G));
152:   PetscFunctionReturn(PETSC_SUCCESS);
153: }

155: /*@
156:   TaoComputeObjective - Computes the objective function value at a given point

158:   Collective

160:   Input Parameters:
161: + tao - the `Tao` context
162: - X   - input vector

164:   Output Parameter:
165: . f - Objective value at X

167:   Level: developer

169:   Note:
 170:   `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm,
171:   so most users would not generally call this routine themselves.

173: .seealso: [](ch_tao), `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
174: @*/
175: PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
176: {
177:   Vec temp;

179:   PetscFunctionBegin;
182:   PetscCheckSameComm(tao, 1, X, 2);
183:   PetscCall(VecLockReadPush(X));
184:   if (tao->ops->computeobjective) {
185:     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
186:     PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
187:     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
188:     tao->nfuncs++;
189:   } else if (tao->ops->computeobjectiveandgradient) {
190:     PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
191:     PetscCall(VecDuplicate(X, &temp));
192:     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
193:     PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
194:     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
195:     PetscCall(VecDestroy(&temp));
196:     tao->nfuncgrads++;
197:   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
198:   PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
199:   PetscCall(VecLockReadPop(X));
200:   PetscFunctionReturn(PETSC_SUCCESS);
201: }

203: /*@
204:   TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

206:   Collective

208:   Input Parameters:
209: + tao - the `Tao` context
210: - X   - input vector

212:   Output Parameters:
213: + f - Objective value at `X`
214: - G - Gradient vector at `X`

216:   Level: developer

218:   Note:
219:   `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
220:   so most users would not generally call this routine themselves.

222: .seealso: [](ch_tao), `TaoComputeGradient()`, `TaoSetObjective()`
223: @*/
224: PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
225: {
226:   PetscFunctionBegin;
230:   PetscCheckSameComm(tao, 1, X, 2);
231:   PetscCheckSameComm(tao, 1, G, 4);
232:   PetscCall(VecLockReadPush(X));
233:   if (tao->ops->computeobjectiveandgradient) {
234:     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
235:     if (tao->ops->computegradient == TaoDefaultComputeGradient) {
236:       PetscCall(TaoComputeObjective(tao, X, f));
237:       PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
238:     } else PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
239:     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
240:     tao->nfuncgrads++;
241:   } else if (tao->ops->computeobjective && tao->ops->computegradient) {
242:     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
243:     PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
244:     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
245:     tao->nfuncs++;
246:     PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
247:     PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
248:     PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
249:     tao->ngrads++;
250:   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
251:   PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
252:   PetscCall(VecLockReadPop(X));

254:   PetscCall(TaoTestGradient(tao, X, G));
255:   PetscFunctionReturn(PETSC_SUCCESS);
256: }

258: /*@C
259:   TaoSetObjective - Sets the function evaluation routine for minimization

261:   Logically Collective

263:   Input Parameters:
264: + tao  - the `Tao` context
265: . func - the objective function
266: - ctx  - [optional] user-defined context for private data for the function evaluation
267:         routine (may be `NULL`)

269:   Calling sequence of `func`:
270: + tao - the optimizer
271: . x   - input vector
272: . f   - function value
273: - ctx - [optional] user-defined function context

275:   Level: beginner
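
  Example usage, a minimal sketch for f(x) = 0.5 ||x||^2 (the callback name `FormObjective`
  is illustrative, not part of PETSc):
.vb
  PetscErrorCode FormObjective(Tao tao, Vec x, PetscReal *f, void *ctx)
  {
    PetscScalar xtx;

    PetscFunctionBeginUser;
    PetscCall(VecDot(x, x, &xtx)); // VecDot performs the parallel reduction
    *f = 0.5 * PetscRealPart(xtx);
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoSetObjective(tao, FormObjective, NULL));
.ve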

277: .seealso: [](ch_tao), `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
278: @*/
279: PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, void *ctx), void *ctx)
280: {
281:   PetscFunctionBegin;
283:   if (ctx) tao->user_objP = ctx;
284:   if (func) tao->ops->computeobjective = func;
285:   PetscFunctionReturn(PETSC_SUCCESS);
286: }

288: /*@C
289:   TaoGetObjective - Gets the function evaluation routine for the function to be minimized

291:   Not Collective

293:   Input Parameter:
294: . tao - the `Tao` context

296:   Output Parameters:
297: + func - the objective function
298: - ctx  - the user-defined context for private data for the function evaluation

300:   Calling sequence of `func`:
301: + tao - the optimizer
302: . x   - input vector
303: . f   - function value
304: - ctx - [optional] user-defined function context

306:   Level: beginner

308: .seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
309: @*/
310: PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, void *ctx), void **ctx)
311: {
312:   PetscFunctionBegin;
314:   if (func) *func = tao->ops->computeobjective;
315:   if (ctx) *ctx = tao->user_objP;
316:   PetscFunctionReturn(PETSC_SUCCESS);
317: }

319: /*@C
320:   TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

322:   Logically Collective

324:   Input Parameters:
325: + tao  - the `Tao` context
326: . res  - the residual vector
327: . func - the residual evaluation routine
328: - ctx  - [optional] user-defined context for private data for the function evaluation
329:          routine (may be `NULL`)

331:   Calling sequence of `func`:
332: + tao - the optimizer
333: . x   - input vector
334: . res - function value vector
335: - ctx - [optional] user-defined function context

337:   Level: beginner
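
  Example usage, a minimal sketch (the callback name `FormResidual` and the vector `r` are
  illustrative; for this toy residual r(x) = x the residual vector has the same layout as `x`):
.vb
  PetscErrorCode FormResidual(Tao tao, Vec x, Vec res, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(VecCopy(x, res)); // residual r_i(x) = x_i, i.e. fit x to zero
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoSetResidualRoutine(tao, r, FormResidual, NULL));
.ve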

339: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
340: @*/
341: PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao tao, Vec x, Vec res, void *ctx), void *ctx)
342: {
343:   PetscFunctionBegin;
346:   PetscCall(PetscObjectReference((PetscObject)res));
347:   if (tao->ls_res) PetscCall(VecDestroy(&tao->ls_res));
348:   tao->ls_res               = res;
349:   tao->user_lsresP          = ctx;
350:   tao->ops->computeresidual = func;
351:   PetscFunctionReturn(PETSC_SUCCESS);
352: }

354: /*@
355:   TaoSetResidualWeights - Sets weights for the residual values. A vector can be used if only diagonal weights are needed; otherwise the weights can be given in coordinate form through the `rows`, `cols`, and `vals` arrays.

357:   Collective

359:   Input Parameters:
360: + tao     - the `Tao` context
361: . sigma_v - vector of weights (diagonal terms only)
362: . n       - the number of weights (if using off-diagonal)
363: . rows    - index list of rows for `sigma_v`
364: . cols    - index list of columns for `sigma_v`
365: - vals    - array of weights

367:   Level: intermediate

369:   Notes:
 370:   If this function is not called, or if `sigma_v` and `vals` are both `NULL`, then the
 371:   identity matrix will be used for the weights.

373:   Either `sigma_v` or `vals` should be `NULL`
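
  For example, purely diagonal weights can be supplied through a vector, leaving the
  coordinate-form arguments empty (the weight vector `w`, with the layout of the residual,
  is illustrative):
.vb
  PetscCall(VecSet(w, 2.0)); // weight every residual entry by 2
  PetscCall(TaoSetResidualWeights(tao, w, 0, NULL, NULL, NULL));
.ve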

375: .seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
376: @*/
377: PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
378: {
379:   PetscInt i;

381:   PetscFunctionBegin;
384:   PetscCall(PetscObjectReference((PetscObject)sigma_v));
385:   PetscCall(VecDestroy(&tao->res_weights_v));
386:   tao->res_weights_v = sigma_v;
387:   if (vals) {
388:     PetscCall(PetscFree(tao->res_weights_rows));
389:     PetscCall(PetscFree(tao->res_weights_cols));
390:     PetscCall(PetscFree(tao->res_weights_w));
391:     PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
392:     PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
393:     PetscCall(PetscMalloc1(n, &tao->res_weights_w));
394:     tao->res_weights_n = n;
395:     for (i = 0; i < n; i++) {
396:       tao->res_weights_rows[i] = rows[i];
397:       tao->res_weights_cols[i] = cols[i];
398:       tao->res_weights_w[i]    = vals[i];
399:     }
400:   } else {
401:     tao->res_weights_n    = 0;
402:     tao->res_weights_rows = NULL;
403:     tao->res_weights_cols = NULL;
404:   }
405:   PetscFunctionReturn(PETSC_SUCCESS);
406: }

408: /*@
409:   TaoComputeResidual - Computes a least-squares residual vector at a given point

411:   Collective

413:   Input Parameters:
414: + tao - the `Tao` context
415: - X   - input vector

417:   Output Parameter:
418: . F - the residual vector evaluated at `X`

420:   Level: advanced

422:   Notes:
423:   `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
424:   so most users would not generally call this routine themselves.

426: .seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
427: @*/
428: PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
429: {
430:   PetscFunctionBegin;
434:   PetscCheckSameComm(tao, 1, X, 2);
435:   PetscCheckSameComm(tao, 1, F, 3);
436:   if (tao->ops->computeresidual) {
437:     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
438:     PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
439:     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
440:     tao->nfuncs++;
441:   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
442:   PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
443:   PetscFunctionReturn(PETSC_SUCCESS);
444: }

446: /*@C
447:   TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

449:   Logically Collective

451:   Input Parameters:
452: + tao  - the `Tao` context
453: . g    - [optional] the vector to internally hold the gradient computation
454: . func - the gradient function
455: - ctx  - [optional] user-defined context for private data for the gradient evaluation
456:         routine (may be `NULL`)

458:   Calling sequence of `func`:
459: + tao - the optimization solver
460: . x   - input vector
461: . g   - gradient value (output)
462: - ctx - [optional] user-defined function context

464:   Level: beginner
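
  Example usage, a minimal sketch for f(x) = 0.5 ||x||^2, whose gradient is x (the callback
  name `FormGradient` is illustrative, not part of PETSc):
.vb
  PetscErrorCode FormGradient(Tao tao, Vec x, Vec g, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(VecCopy(x, g)); // grad f(x) = x
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoSetGradient(tao, NULL, FormGradient, NULL));
.ve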

466: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
467: @*/
468: PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, Vec g, void *ctx), void *ctx)
469: {
470:   PetscFunctionBegin;
472:   if (g) {
474:     PetscCheckSameComm(tao, 1, g, 2);
475:     PetscCall(PetscObjectReference((PetscObject)g));
476:     PetscCall(VecDestroy(&tao->gradient));
477:     tao->gradient = g;
478:   }
479:   if (func) tao->ops->computegradient = func;
480:   if (ctx) tao->user_gradP = ctx;
481:   PetscFunctionReturn(PETSC_SUCCESS);
482: }

484: /*@C
485:   TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

487:   Not Collective

489:   Input Parameter:
490: . tao - the `Tao` context

492:   Output Parameters:
493: + g    - the vector to internally hold the gradient computation
494: . func - the gradient function
495: - ctx  - user-defined context for private data for the gradient evaluation routine

497:   Calling sequence of `func`:
498: + tao - the optimizer
499: . x   - input vector
500: . g   - gradient value (output)
501: - ctx - [optional] user-defined function context

503:   Level: beginner

505: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
506: @*/
507: PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, Vec g, void *ctx), void **ctx)
508: {
509:   PetscFunctionBegin;
511:   if (g) *g = tao->gradient;
512:   if (func) *func = tao->ops->computegradient;
513:   if (ctx) *ctx = tao->user_gradP;
514:   PetscFunctionReturn(PETSC_SUCCESS);
515: }

517: /*@C
518:   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

520:   Logically Collective

522:   Input Parameters:
523: + tao  - the `Tao` context
524: . g    - [optional] the vector to internally hold the gradient computation
525: . func - the combined objective function and gradient evaluation routine
526: - ctx  - [optional] user-defined context for private data for the gradient evaluation
527:         routine (may be `NULL`)

529:   Calling sequence of `func`:
530: + tao - the optimization object
531: . x   - input vector
532: . f   - objective value (output)
533: . g   - gradient value (output)
534: - ctx - [optional] user-defined function context

536:   Level: beginner

538:   Note:
 539:   For some optimization methods, providing a combined routine that evaluates the objective and gradient together can be more efficient than evaluating them separately.
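
  Example usage, a minimal sketch for f(x) = 0.5 ||x||^2 and its gradient x, evaluated together
  (the callback name `FormObjGrad` is illustrative, not part of PETSc):
.vb
  PetscErrorCode FormObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
  {
    PetscScalar xtx;

    PetscFunctionBeginUser;
    PetscCall(VecDot(x, x, &xtx));
    *f = 0.5 * PetscRealPart(xtx);
    PetscCall(VecCopy(x, g)); // reuse the work already done for the objective
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormObjGrad, NULL));
.ve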

541: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
542: @*/
543: PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx), void *ctx)
544: {
545:   PetscFunctionBegin;
547:   if (g) {
549:     PetscCheckSameComm(tao, 1, g, 2);
550:     PetscCall(PetscObjectReference((PetscObject)g));
551:     PetscCall(VecDestroy(&tao->gradient));
552:     tao->gradient = g;
553:   }
554:   if (ctx) tao->user_objgradP = ctx;
555:   if (func) tao->ops->computeobjectiveandgradient = func;
556:   PetscFunctionReturn(PETSC_SUCCESS);
557: }

559: /*@C
560:   TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

562:   Not Collective

564:   Input Parameter:
565: . tao - the `Tao` context

567:   Output Parameters:
568: + g    - the vector to internally hold the gradient computation
569: . func - the combined objective function and gradient evaluation routine
570: - ctx  - user-defined context for private data for the gradient evaluation routine

572:   Calling sequence of `func`:
573: + tao - the optimizer
574: . x   - input vector
575: . f   - objective value (output)
576: . g   - gradient value (output)
577: - ctx - [optional] user-defined function context

579:   Level: beginner

581: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
582: @*/
583: PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx), void **ctx)
584: {
585:   PetscFunctionBegin;
587:   if (g) *g = tao->gradient;
588:   if (func) *func = tao->ops->computeobjectiveandgradient;
589:   if (ctx) *ctx = tao->user_objgradP;
590:   PetscFunctionReturn(PETSC_SUCCESS);
591: }

593: /*@
594:   TaoIsObjectiveDefined - Checks to see if the user has
595:   declared an objective-only routine.  Useful for determining when
596:   it is appropriate to call `TaoComputeObjective()` or
597:   `TaoComputeObjectiveAndGradient()`

599:   Not Collective

601:   Input Parameter:
602: . tao - the `Tao` context

604:   Output Parameter:
605: . flg - `PETSC_TRUE` if the objective routine has been set by the user, `PETSC_FALSE` otherwise

607:   Level: developer
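
  For example, a solver implementation might branch on what the user has provided; a sketch,
  not taken from any particular `Tao` implementation:
.vb
  PetscBool has_objective;

  PetscCall(TaoIsObjectiveDefined(tao, &has_objective));
  if (has_objective) PetscCall(TaoComputeObjective(tao, x, &f));
  else PetscCall(TaoComputeObjectiveAndGradient(tao, x, &f, g));
.ve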

609: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
610: @*/
611: PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
612: {
613:   PetscFunctionBegin;
615:   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
616:   else *flg = PETSC_TRUE;
617:   PetscFunctionReturn(PETSC_SUCCESS);
618: }

620: /*@
 621:   TaoIsGradientDefined - Checks to see if the user has
 622:   declared a gradient routine.  Useful for determining when
 623:   it is appropriate to call `TaoComputeGradient()` or
 624:   `TaoComputeObjectiveAndGradient()`

626:   Not Collective

628:   Input Parameter:
629: . tao - the `Tao` context

631:   Output Parameter:
632: . flg - `PETSC_TRUE` if the gradient routine has been set by the user, `PETSC_FALSE` otherwise

634:   Level: developer

636: .seealso: [](ch_tao), `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
637: @*/
638: PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
639: {
640:   PetscFunctionBegin;
642:   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
643:   else *flg = PETSC_TRUE;
644:   PetscFunctionReturn(PETSC_SUCCESS);
645: }

647: /*@
648:   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
649:   declared a joint objective/gradient routine.  Useful for determining when
650:   it is appropriate to call `TaoComputeObjective()` or
651:   `TaoComputeObjectiveAndGradient()`

653:   Not Collective

655:   Input Parameter:
656: . tao - the `Tao` context

658:   Output Parameter:
659: . flg - `PETSC_TRUE` if the combined objective/gradient routine has been set by the user, `PETSC_FALSE` otherwise

661:   Level: developer

663: .seealso: [](ch_tao), `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
664: @*/
665: PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
666: {
667:   PetscFunctionBegin;
669:   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
670:   else *flg = PETSC_TRUE;
671:   PetscFunctionReturn(PETSC_SUCCESS);
672: }