Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts   - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function
 19: - ctx  - [optional] user-defined function context

 21:   Level: intermediate

 23:   Note:
 24:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.

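  Example usage:
  A minimal sketch (not taken from the PETSc examples); `MyRHSJacobianP` is hypothetical user code for the scalar ODE u_t = -p*u with a single parameter p, so the JacobianP is a 1 x 1 dense matrix.
.vb
  static PetscErrorCode MyRHSJacobianP(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
  {
    const PetscScalar *u;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(U, &u));
    PetscCall(MatSetValue(A, 0, 0, -u[0], INSERT_VALUES)); /* dG/dp = -u for G = -p*u */
    PetscCall(VecRestoreArrayRead(U, &u));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Mat Jacp;
  PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, &Jacp)); /* rows ~ u, columns ~ p */
  PetscCall(TSSetRHSJacobianP(ts, Jacp, MyRHSJacobianP, NULL));
.ve
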
 26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianP`, `TSGetRHSJacobianP()`
 27: @*/
 28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianP func, void *ctx)
 29: {
 30:   PetscFunctionBegin;

 34:   ts->rhsjacobianp    = func;
 35:   ts->rhsjacobianpctx = ctx;
 36:   if (Amat) {
 37:     PetscCall(PetscObjectReference((PetscObject)Amat));
 38:     PetscCall(MatDestroy(&ts->Jacprhs));
 39:     ts->Jacprhs = Amat;
 40:   }
 41:   PetscFunctionReturn(PETSC_SUCCESS);
 42: }

 44: /*@C
 45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 47:   Logically Collective

 49:   Input Parameter:
 50: . ts - `TS` context obtained from `TSCreate()`

 52:   Output Parameters:
 53: + Amat - JacobianP matrix
 54: . func - function
 55: - ctx  - [optional] user-defined function context

 57:   Level: intermediate

 59:   Note:
 60:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.

 62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianP`
 63: @*/
 64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianP *func, void **ctx)
 65: {
 66:   PetscFunctionBegin;
 67:   if (func) *func = ts->rhsjacobianp;
 68:   if (ctx) *ctx = ts->rhsjacobianpctx;
 69:   if (Amat) *Amat = ts->Jacprhs;
 70:   PetscFunctionReturn(PETSC_SUCCESS);
 71: }

 73: /*@C
 74:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 76:   Collective

 78:   Input Parameters:
 79: + ts - The `TS` context obtained from `TSCreate()`
 80: . t  - the time
 81: - U  - the solution at which to compute the Jacobian

 83:   Output Parameter:
 84: . Amat - the computed Jacobian

 86:   Level: developer

 88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
 89: @*/
 90: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
 91: {
 92:   PetscFunctionBegin;
 93:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

 97:   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
 98:   else {
 99:     PetscBool assembled;
100:     PetscCall(MatZeroEntries(Amat));
101:     PetscCall(MatAssembled(Amat, &assembled));
102:     if (!assembled) {
103:       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104:       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105:     }
106:   }
107:   PetscFunctionReturn(PETSC_SUCCESS);
108: }

110: /*@C
111:   TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.

113:   Logically Collective

115:   Input Parameters:
116: + ts   - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function
119: - ctx  - [optional] user-defined function context

121:   Calling sequence of `func`:
122: + ts    - the `TS` context
123: . t     - current timestep
124: . U     - input vector (current ODE solution)
125: . Udot  - time derivative of state vector
126: . shift - shift to apply, see note below
127: . A     - output matrix
128: - ctx   - [optional] user-defined function context

130:   Level: intermediate

132:   Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.

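  Example usage:
  A minimal sketch (hypothetical user code, not from the PETSc examples); for the scalar problem written in implicit form F(Udot,U;p) = Udot + p*U = 0, the derivative of F w.r.t. the single parameter p is simply U.
.vb
  static PetscErrorCode MyIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
  {
    const PetscScalar *u;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(U, &u));
    PetscCall(MatSetValue(A, 0, 0, u[0], INSERT_VALUES)); /* dF/dp = u for F = udot + p*u */
    PetscCall(VecRestoreArrayRead(U, &u));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSSetIJacobianP(ts, Jacp, MyIJacobianP, NULL)); /* Jacp created by the user as an n x np dense matrix */
.ve
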
135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx)
138: {
139:   PetscFunctionBegin;

143:   ts->ijacobianp    = func;
144:   ts->ijacobianpctx = ctx;
145:   if (Amat) {
146:     PetscCall(PetscObjectReference((PetscObject)Amat));
147:     PetscCall(MatDestroy(&ts->Jacp));
148:     ts->Jacp = Amat;
149:   }
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@C
154:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

156:   Collective

158:   Input Parameters:
159: + ts    - the `TS` context
160: . t     - current timestep
161: . U     - state vector
162: . Udot  - time derivative of state vector
163: . shift - shift to apply, see note below
164: - imex  - flag indicating whether the method is IMEX, in which case the RHSJacobianP is kept separate

166:   Output Parameter:
167: . Amat - Jacobian matrix

169:   Level: developer

171: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
172: @*/
173: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
174: {
175:   PetscFunctionBegin;
176:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

181:   PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
182:   if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
183:   if (imex) {
184:     if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
185:       PetscBool assembled;
186:       PetscCall(MatZeroEntries(Amat));
187:       PetscCall(MatAssembled(Amat, &assembled));
188:       if (!assembled) {
189:         PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
190:         PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
191:       }
192:     }
193:   } else {
194:     if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
195:     if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
196:       PetscCall(MatScale(Amat, -1));
197:     } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
198:       MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
199:       if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
200:         PetscCall(MatZeroEntries(Amat));
201:       }
202:       PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
203:     }
204:   }
205:   PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
206:   PetscFunctionReturn(PETSC_SUCCESS);
207: }

209: /*@C
210:   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

212:   Logically Collective

214:   Input Parameters:
215: + ts           - the `TS` context obtained from `TSCreate()`
216: . numcost      - number of gradients to be computed, this is the number of cost functions
217: . costintegral - vector that stores the integral values
218: . rf           - routine for evaluating the integrand function
219: . drduf        - function that computes the gradients of r with respect to u
220: . drdpf        - function that computes the gradients of r with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
221: . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
222: - ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)

224:   Calling sequence of `rf`:
225: + ts  - the integrator
226: . t   - the time
227: . U   - the solution
228: . F   - the computed value of the function
229: - ctx - the user context

231:   Calling sequence of `drduf`:
232: + ts   - the integrator
233: . t    - the time
234: . U    - the solution
235: . dRdU - the computed gradients of r with respect to u
236: - ctx  - the user context

238:   Calling sequence of `drdpf`:
239: + ts   - the integrator
240: . t    - the time
241: . U    - the solution
242: . dRdP - the computed gradients of r with respect to p
243: - ctx  - the user context

245:   Level: deprecated

247:   Notes:
248:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions.

250:   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead

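  A minimal sketch of the recommended replacement (the names `CostIntegrand`, `DRDUJacobian`, `DRDPJacobian`, and `user` are hypothetical user code; the matrices must be created by the caller with numcost rows):
.vb
  TS  quadts;
  Mat DRDU, DRDP;

  PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));              /* PETSC_TRUE: evaluate the integral in the forward run */
  PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));       /* r(u,p,t), the integrand */
  PetscCall(TSSetRHSJacobian(quadts, DRDU, DRDU, DRDUJacobian, &user));  /* dr/du, needed by the adjoint */
  PetscCall(TSSetRHSJacobianP(quadts, DRDP, DRDPJacobian, &user));       /* dr/dp, needed if mu is requested */
.ve
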
252: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
253:           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
254: @*/
255: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
256: {
257:   PetscFunctionBegin;
260:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
261:   if (!ts->numcost) ts->numcost = numcost;

263:   if (costintegral) {
264:     PetscCall(PetscObjectReference((PetscObject)costintegral));
265:     PetscCall(VecDestroy(&ts->vec_costintegral));
266:     ts->vec_costintegral = costintegral;
267:   } else {
268:     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
269:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
270:     } else {
271:       PetscCall(VecSet(ts->vec_costintegral, 0.0));
272:     }
273:   }
274:   if (!ts->vec_costintegrand) {
275:     PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
276:   } else {
277:     PetscCall(VecSet(ts->vec_costintegrand, 0.0));
278:   }
279:   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
280:   ts->costintegrand    = rf;
281:   ts->costintegrandctx = ctx;
282:   ts->drdufunction     = drduf;
283:   ts->drdpfunction     = drdpf;
284:   PetscFunctionReturn(PETSC_SUCCESS);
285: }

287: /*@C
288:   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
289:   It is valid to call the routine after a backward run.

291:   Not Collective

293:   Input Parameter:
294: . ts - the `TS` context obtained from `TSCreate()`

296:   Output Parameter:
297: . v - the vector containing the integrals for each cost function

299:   Level: intermediate

301: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
302: @*/
303: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
304: {
305:   TS quadts;

307:   PetscFunctionBegin;
309:   PetscAssertPointer(v, 2);
310:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
311:   *v = quadts->vec_sol;
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }

315: /*@C
316:   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

318:   Input Parameters:
319: + ts - the `TS` context
320: . t  - current time
321: - U  - state vector, i.e. current solution

323:   Output Parameter:
324: . Q - vector of size numcost to hold the outputs

326:   Level: deprecated

328:   Note:
329:   Most users should not need to explicitly call this routine, as it
330:   is used internally within the sensitivity analysis context.

332: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
333: @*/
334: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
335: {
336:   PetscFunctionBegin;

341:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
342:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
343:   else PetscCall(VecZeroEntries(Q));
344:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
345:   PetscFunctionReturn(PETSC_SUCCESS);
346: }

348: // PetscClangLinter pragma disable: -fdoc-*
349: /*@C
350:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

352:   Level: deprecated

354: @*/
355: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
356: {
357:   PetscFunctionBegin;
358:   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

362:   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
363:   PetscFunctionReturn(PETSC_SUCCESS);
364: }

366: // PetscClangLinter pragma disable: -fdoc-*
367: /*@C
368:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

370:   Level: deprecated

372: @*/
373: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
374: {
375:   PetscFunctionBegin;
376:   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

380:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
381:   PetscFunctionReturn(PETSC_SUCCESS);
382: }

384: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
385: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
386: /*@C
387:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

389:   Logically Collective

391:   Input Parameters:
392: + ts   - `TS` context obtained from `TSCreate()`
393: . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
394: . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
395: . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
396: . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
397: . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
398: . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
399: . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
400: - hessianproductfunc4 - vector-Hessian-vector product function for F_PP

402:   Calling sequence of `ihessianproductfunc1`:
403: + ts  - the `TS` context
404: . t   - current timestep
405: . U   - input vector (current ODE solution)
406: . Vl  - an array of input vectors to be left-multiplied with the Hessian
407: . Vr  - input vector to be right-multiplied with the Hessian
408: . VHV - an array of output vectors for vector-Hessian-vector product
409: - ctx - [optional] user-defined function context

411:   Level: intermediate

413:   Notes:
414:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
415:   descriptions are omitted for brevity.

417:   The first Hessian function and the working array are required.
418:   As an example of how to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
419:   $ Vl_n^T*F_UP*Vr
420:   where the vectors Vl_n (the n-th element in the array Vl) and Vr are of sizes N and M respectively, and the Hessian F_UP is of size N x N x M.
421:   Each entry of F_UP corresponds to the derivative
422:   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
423:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
424:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
425:   If the cost function is a scalar, there will be only one vector in Vl and VHV.

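  Example usage:
  A minimal sketch (hypothetical user code; `u` is the solution vector) for a scalar problem with IFunction F = Udot - U^2, so F_UU = -2 and only the first callback and working array are provided:
.vb
  static PetscErrorCode MyIHessianProductUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
  {
    const PetscScalar *vl, *vr;
    PetscScalar       *vhv;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(Vl[0], &vl));
    PetscCall(VecGetArrayRead(Vr, &vr));
    PetscCall(VecGetArray(VHV[0], &vhv));
    vhv[0] = -2.0 * vl[0] * vr[0]; /* Vl_0^T * F_UU * Vr with F_UU = -2 */
    PetscCall(VecRestoreArray(VHV[0], &vhv));
    PetscCall(VecRestoreArrayRead(Vr, &vr));
    PetscCall(VecRestoreArrayRead(Vl[0], &vl));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Vec ihp1[1];
  PetscCall(VecDuplicate(u, &ihp1[0])); /* working vector with the layout of the solution */
  PetscCall(TSSetIHessianProduct(ts, ihp1, MyIHessianProductUU, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
.ve
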
427: .seealso: [](ch_ts), `TS`
428: @*/
429: PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
430: {
431:   PetscFunctionBegin;
433:   PetscAssertPointer(ihp1, 2);

435:   ts->ihessianproductctx = ctx;
436:   if (ihp1) ts->vecs_fuu = ihp1;
437:   if (ihp2) ts->vecs_fup = ihp2;
438:   if (ihp3) ts->vecs_fpu = ihp3;
439:   if (ihp4) ts->vecs_fpp = ihp4;
440:   ts->ihessianproduct_fuu = ihessianproductfunc1;
441:   ts->ihessianproduct_fup = ihessianproductfunc2;
442:   ts->ihessianproduct_fpu = ihessianproductfunc3;
443:   ts->ihessianproduct_fpp = ihessianproductfunc4;
444:   PetscFunctionReturn(PETSC_SUCCESS);
445: }

447: /*@C
448:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

450:   Collective

452:   Input Parameters:
453: + ts - The `TS` context obtained from `TSCreate()`
454: . t  - the time
455: . U  - the solution at which to compute the Hessian product
456: . Vl - the array of input vectors to be multiplied with the Hessian from the left
457: - Vr - the input vector to be multiplied with the Hessian from the right

459:   Output Parameter:
460: . VHV - the array of output vectors that store the Hessian product

462:   Level: developer

464:   Note:
465:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
466:   so most users would not generally call this routine themselves.

468: .seealso: [](ch_ts), `TSSetIHessianProduct()`
469: @*/
470: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
471: {
472:   PetscFunctionBegin;
473:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

477:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

479:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
480:   if (ts->rhshessianproduct_guu) {
481:     PetscInt nadj;
482:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
483:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
484:   }
485:   PetscFunctionReturn(PETSC_SUCCESS);
486: }

488: /*@C
489:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

491:   Collective

493:   Input Parameters:
494: + ts - The `TS` context obtained from `TSCreate()`
495: . t  - the time
496: . U  - the solution at which to compute the Hessian product
497: . Vl - the array of input vectors to be multiplied with the Hessian from the left
498: - Vr - the input vector to be multiplied with the Hessian from the right

500:   Output Parameter:
501: . VHV - the array of output vectors that store the Hessian product

503:   Level: developer

505:   Note:
506:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
507:   so most users would not generally call this routine themselves.

509: .seealso: [](ch_ts), `TSSetIHessianProduct()`
510: @*/
511: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
512: {
513:   PetscFunctionBegin;
514:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

518:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

520:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
521:   if (ts->rhshessianproduct_gup) {
522:     PetscInt nadj;
523:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
524:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
525:   }
526:   PetscFunctionReturn(PETSC_SUCCESS);
527: }

529: /*@C
530:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

532:   Collective

534:   Input Parameters:
535: + ts - The `TS` context obtained from `TSCreate()`
536: . t  - the time
537: . U  - the solution at which to compute the Hessian product
538: . Vl - the array of input vectors to be multiplied with the Hessian from the left
539: - Vr - the input vector to be multiplied with the Hessian from the right

541:   Output Parameter:
542: . VHV - the array of output vectors that store the Hessian product

544:   Level: developer

546:   Note:
547:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
548:   so most users would not generally call this routine themselves.

550: .seealso: [](ch_ts), `TSSetIHessianProduct()`
551: @*/
552: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
553: {
554:   PetscFunctionBegin;
555:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

559:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

561:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
562:   if (ts->rhshessianproduct_gpu) {
563:     PetscInt nadj;
564:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
565:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
566:   }
567:   PetscFunctionReturn(PETSC_SUCCESS);
568: }

570: /*@C
571:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

573:   Collective

575:   Input Parameters:
576: + ts - The `TS` context obtained from `TSCreate()`
577: . t  - the time
578: . U  - the solution at which to compute the Hessian product
579: . Vl - the array of input vectors to be multiplied with the Hessian from the left
580: - Vr - the input vector to be multiplied with the Hessian from the right

582:   Output Parameter:
583: . VHV - the array of output vectors that store the Hessian product

585:   Level: developer

587:   Note:
588:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
589:   so most users would not generally call this routine themselves.

591: .seealso: [](ch_ts), `TSSetIHessianProduct()`
592: @*/
593: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
594: {
595:   PetscFunctionBegin;
596:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

600:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

602:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
603:   if (ts->rhshessianproduct_gpp) {
604:     PetscInt nadj;
605:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
606:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
607:   }
608:   PetscFunctionReturn(PETSC_SUCCESS);
609: }

611: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
612: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
613: /*@C
614:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
615:   product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
616:   variable.

618:   Logically Collective

620:   Input Parameters:
621: + ts     - `TS` context obtained from `TSCreate()`
622: . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
623: . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
624: . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
625: . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
626: . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
627: . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
628: . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
629: . hessianproductfunc4 - vector-Hessian-vector product function for G_PP
630: - ctx    - [optional] user-defined function context

632:   Calling sequence of `rhshessianproductfunc1`:
633: + ts  - the `TS` context
634: . t   - current timestep
635: . U   - input vector (current ODE solution)
636: . Vl  - an array of input vectors to be left-multiplied with the Hessian
637: . Vr  - input vector to be right-multiplied with the Hessian
638: . VHV - an array of output vectors for vector-Hessian-vector product
639: - ctx - [optional] user-defined function context

641:   Level: intermediate

643:   Notes:
644:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
645:   descriptions are omitted for brevity.

647:   The first Hessian function and the working array are required.

649:   As an example of how to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
650:   $ Vl_n^T*G_UP*Vr
651:   where the vectors Vl_n (the n-th element in the array Vl) and Vr are of sizes N and M respectively, and the Hessian G_UP is of size N x N x M.
652:   Each entry of G_UP corresponds to the derivative
653:   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
654:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
655:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
656:   If the cost function is a scalar, there will be only one vector in Vl and VHV.

658: .seealso: `TS`, `TSAdjoint`
659: @*/
660: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
661: {
662:   PetscFunctionBegin;
664:   PetscAssertPointer(rhshp1, 2);

666:   ts->rhshessianproductctx = ctx;
667:   if (rhshp1) ts->vecs_guu = rhshp1;
668:   if (rhshp2) ts->vecs_gup = rhshp2;
669:   if (rhshp3) ts->vecs_gpu = rhshp3;
670:   if (rhshp4) ts->vecs_gpp = rhshp4;
671:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
672:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
673:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
674:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
675:   PetscFunctionReturn(PETSC_SUCCESS);
676: }

678: /*@C
679:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

681:   Collective

683:   Input Parameters:
684: + ts - The `TS` context obtained from `TSCreate()`
685: . t  - the time
686: . U  - the solution at which to compute the Hessian product
687: . Vl - the array of input vectors to be multiplied with the Hessian from the left
688: - Vr - the input vector to be multiplied with the Hessian from the right

690:   Output Parameter:
691: . VHV - the array of output vectors that store the Hessian product

693:   Level: developer

695:   Note:
696:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
697:   so most users would not generally call this routine themselves.

699: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
700: @*/
701: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
702: {
703:   PetscFunctionBegin;
704:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

708:   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
709:   PetscFunctionReturn(PETSC_SUCCESS);
710: }

712: /*@C
713:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

715:   Collective

717:   Input Parameters:
718: + ts - The `TS` context obtained from `TSCreate()`
719: . t  - the time
720: . U  - the solution at which to compute the Hessian product
721: . Vl - the array of input vectors to be multiplied with the Hessian from the left
722: - Vr - the input vector to be multiplied with the Hessian from the right

724:   Output Parameter:
725: . VHV - the array of output vectors that store the Hessian product

727:   Level: developer

729:   Note:
730:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
731:   so most users would not generally call this routine themselves.

733: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
734: @*/
735: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
736: {
737:   PetscFunctionBegin;
738:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

742:   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
743:   PetscFunctionReturn(PETSC_SUCCESS);
744: }

746: /*@C
747:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

749:   Collective

751:   Input Parameters:
752: + ts - The `TS` context obtained from `TSCreate()`
753: . t  - the time
754: . U  - the solution at which to compute the Hessian product
755: . Vl - the array of input vectors to be multiplied with the Hessian from the left
756: - Vr - the input vector to be multiplied with the Hessian from the right

758:   Output Parameter:
759: . VHV - the array of output vectors that store the Hessian product

761:   Level: developer

763:   Note:
764:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
765:   so most users would not generally call this routine themselves.

767: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
768: @*/
769: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
770: {
771:   PetscFunctionBegin;
772:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

776:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
777:   PetscFunctionReturn(PETSC_SUCCESS);
778: }

780: /*@C
781:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.

783:   Collective

785:   Input Parameters:
786: + ts - The `TS` context obtained from `TSCreate()`
787: . t  - the time
788: . U  - the solution at which to compute the Hessian product
789: . Vl - the array of input vectors to be multiplied with the Hessian from the left
790: - Vr - the input vector to be multiplied with the Hessian from the right

792:   Output Parameter:
793: . VHV - the array of output vectors that store the Hessian product

795:   Level: developer

797:   Note:
798:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
799:   so most users would not generally call this routine themselves.

801: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
802: @*/
803: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
804: {
805:   PetscFunctionBegin;
806:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

810:   PetscCallBack("TS callback RHSHessianProduct 4 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
811:   PetscFunctionReturn(PETSC_SUCCESS);
812: }

814: /* --------------------------- Adjoint sensitivity ---------------------------*/

816: /*@
817:   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
818:   for use by the `TS` adjoint routines.

820:   Logically Collective

822:   Input Parameters:
823: + ts      - the `TS` context obtained from `TSCreate()`
824: . numcost - number of gradients to be computed, this is the number of cost functions
825: . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
826: - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

828:   Level: beginner

830:   Notes:
831:   The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime and mu_i = df/dp|finaltime.

833:   After `TSAdjointSolve()` is called, lambda and mu contain the computed sensitivities

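  Example usage:
  A minimal sketch for a single cost function psi (hypothetical user code; `u` is the solution vector, `np` the number of parameters, and psi = u_0(T) is chosen only for illustration). This assumes a forward `TSSolve()` with `TSSetSaveTrajectory()` has already been performed.
.vb
  Vec lambda[1], mu[1];

  PetscCall(VecDuplicate(u, &lambda[0]));                   /* same layout as the ODE solution */
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &mu[0]));     /* one entry per parameter */
  PetscCall(VecZeroEntries(lambda[0]));
  PetscCall(VecSetValue(lambda[0], 0, 1.0, INSERT_VALUES)); /* dpsi/du at the final time for psi = u_0(T) */
  PetscCall(VecAssemblyBegin(lambda[0]));
  PetscCall(VecAssemblyEnd(lambda[0]));
  PetscCall(VecZeroEntries(mu[0]));                         /* dpsi/dp at the final time */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSAdjointSolve(ts));                            /* lambda[0], mu[0] now hold the sensitivities */
.ve
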
835: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
836: @*/
837: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
838: {
839:   PetscFunctionBegin;
841:   PetscAssertPointer(lambda, 3);
842:   ts->vecs_sensi  = lambda;
843:   ts->vecs_sensip = mu;
844:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
845:   ts->numcost = numcost;
846:   PetscFunctionReturn(PETSC_SUCCESS);
847: }

849: /*@
850:   TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`

852:   Not Collective, but the vectors returned are parallel if `TS` is parallel

854:   Input Parameter:
855: . ts - the `TS` context obtained from `TSCreate()`

857:   Output Parameters:
858: + numcost - size of returned arrays
859: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
860: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

862:   Level: intermediate

864: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
865: @*/
866: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
867: {
868:   PetscFunctionBegin;
870:   if (numcost) *numcost = ts->numcost;
871:   if (lambda) *lambda = ts->vecs_sensi;
872:   if (mu) *mu = ts->vecs_sensip;
873:   PetscFunctionReturn(PETSC_SUCCESS);
874: }

876: /*@
877:   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
878:   for use by the `TS` adjoint routines.

880:   Logically Collective

882:   Input Parameters:
883: + ts      - the `TS` context obtained from `TSCreate()`
884: . numcost - number of cost functions
885: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
886: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
887: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

889:   Level: beginner

891:   Notes:
892:   The Hessian of the cost function is completely different from the Hessian of the ODE/DAE system

894:   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

896:   After `TSAdjointSolve()` is called, lambda2 and mu2 will contain the computed second-order adjoint sensitivities and can be used to produce Hessian-vector products (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

898:   Passing `NULL` for `lambda2` disables the second-order calculation.

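  Example usage:
  A minimal sketch of the second-order adjoint setup for one cost function (hypothetical user code; `u` is the solution vector, `np` the number of parameters, and `dir` would normally come from an optimization solver):
.vb
  Vec lambda2[1], mu2[1], dir;

  PetscCall(VecDuplicate(u, &lambda2[0]));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &mu2[0]));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &dir));       /* direction in parameter space */
  PetscCall(TSSetCostHessianProducts(ts, 1, lambda2, mu2, dir));
  PetscCall(TSAdjointSetForward(ts, NULL));                 /* enable the tangent linear solver */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));         /* lambda, mu set up as for TSSetCostGradients() */
  PetscCall(TSAdjointSolve(ts));                            /* lambda2[0], mu2[0] now hold Hessian-vector products */
.ve
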
900: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
901: @*/
902: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir)
903: {
904:   PetscFunctionBegin;
906:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostHessianProducts()) is inconsistent with the one set by TSSetCostIntegrand()");
907:   ts->numcost      = numcost;
908:   ts->vecs_sensi2  = lambda2;
909:   ts->vecs_sensi2p = mu2;
910:   ts->vec_dir      = dir;
911:   PetscFunctionReturn(PETSC_SUCCESS);
912: }

914: /*@
915:   TSGetCostHessianProducts - Returns the Hessian-vector products from the `TSAdjointSolve()`

917:   Not Collective, but vectors returned are parallel if `TS` is parallel

919:   Input Parameter:
920: . ts - the `TS` context obtained from `TSCreate()`

922:   Output Parameters:
923: + numcost - number of cost functions
924: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
925: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
926: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

928:   Level: intermediate

930: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
931: @*/
932: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir)
933: {
934:   PetscFunctionBegin;
936:   if (numcost) *numcost = ts->numcost;
937:   if (lambda2) *lambda2 = ts->vecs_sensi2;
938:   if (mu2) *mu2 = ts->vecs_sensi2p;
939:   if (dir) *dir = ts->vec_dir;
940:   PetscFunctionReturn(PETSC_SUCCESS);
941: }

943: /*@
944:   TSAdjointSetForward - Triggers the tangent linear solver and initializes the forward sensitivities

946:   Logically Collective

948:   Input Parameters:
949: + ts   - the `TS` context obtained from `TSCreate()`
950: - didp - the derivative of initial values w.r.t. parameters

952:   Level: intermediate

954:   Notes:
955:   When computing sensitivities w.r.t. the initial condition, set `didp` to `NULL` so that the solver treats it mathematically as an identity matrix.
956:   `TSAdjoint` does not reset the tangent linear solver automatically; `TSAdjointResetForward()` should be called to reset it.

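  A minimal sketch of one Hessian-vector product per direction (hypothetical user code), showing the reset that is needed between repeated solves:
.vb
  PetscCall(TSAdjointSetForward(ts, NULL)); /* NULL: dI/dP is treated as an identity */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSAdjointSolve(ts));
  PetscCall(TSAdjointResetForward(ts));     /* reset before the next direction */
.ve
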
958: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
959: @*/
960: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
961: {
962:   Mat          A;
963:   Vec          sp;
964:   PetscScalar *xarr;
965:   PetscInt     lsize;

967:   PetscFunctionBegin;
968:   ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
969:   PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
970:   PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
971:   /* create a single-column dense matrix */
972:   PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
973:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

975:   PetscCall(VecDuplicate(ts->vec_sol, &sp));
976:   PetscCall(MatDenseGetColumn(A, 0, &xarr));
977:   PetscCall(VecPlaceArray(sp, xarr));
978:   if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
979:     if (didp) {
980:       PetscCall(MatMult(didp, ts->vec_dir, sp));
981:       PetscCall(VecScale(sp, 2.));
982:     } else {
983:       PetscCall(VecZeroEntries(sp));
984:     }
985:   } else { /* tangent linear variable initialized as dir */
986:     PetscCall(VecCopy(ts->vec_dir, sp));
987:   }
988:   PetscCall(VecResetArray(sp));
989:   PetscCall(MatDenseRestoreColumn(A, &xarr));
990:   PetscCall(VecDestroy(&sp));

992:   PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

994:   PetscCall(MatDestroy(&A));
995:   PetscFunctionReturn(PETSC_SUCCESS);
996: }

998: /*@
999:   TSAdjointResetForward - Resets the tangent linear solver and destroys the tangent linear context

1001:   Logically Collective

1003:   Input Parameter:
1004: . ts - the `TS` context obtained from `TSCreate()`

1006:   Level: intermediate

1008: .seealso: [](ch_ts), `TSAdjointSetForward()`
1009: @*/
1010: PetscErrorCode TSAdjointResetForward(TS ts)
1011: {
1012:   PetscFunctionBegin;
1013:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1014:   PetscCall(TSForwardReset(ts));
1015:   PetscFunctionReturn(PETSC_SUCCESS);
1016: }

1018: /*@
1019:   TSAdjointSetUp - Sets up the internal data structures for the later use
1020:   of an adjoint solver

1022:   Collective

1024:   Input Parameter:
1025: . ts - the `TS` context obtained from `TSCreate()`

1027:   Level: advanced

1029: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1030: @*/
1031: PetscErrorCode TSAdjointSetUp(TS ts)
1032: {
1033:   TSTrajectory tj;
1034:   PetscBool    match;

1036:   PetscFunctionBegin;
1038:   if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1039:   PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1040:   PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1041:   PetscCall(TSGetTrajectory(ts, &tj));
1042:   PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1043:   if (match) {
1044:     PetscBool solution_only;
1045:     PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1046:     PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1047:   }
1048:   PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

1050:   if (ts->quadraturets) { /* if there is integral in the cost function */
1051:     PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1052:     if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1053:   }

1055:   PetscTryTypeMethod(ts, adjointsetup);
1056:   ts->adjointsetupcalled = PETSC_TRUE;
1057:   PetscFunctionReturn(PETSC_SUCCESS);
1058: }

1060: /*@
1061:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1063:   Collective

1065:   Input Parameter:
1066: . ts - the `TS` context obtained from `TSCreate()`

1068:   Level: beginner

1070: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1071: @*/
1072: PetscErrorCode TSAdjointReset(TS ts)
1073: {
1074:   PetscFunctionBegin;
1076:   PetscTryTypeMethod(ts, adjointreset);
1077:   if (ts->quadraturets) { /* if there is integral in the cost function */
1078:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1079:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1080:   }
1081:   ts->vecs_sensi         = NULL;
1082:   ts->vecs_sensip        = NULL;
1083:   ts->vecs_sensi2        = NULL;
1084:   ts->vecs_sensi2p       = NULL;
1085:   ts->vec_dir            = NULL;
1086:   ts->adjointsetupcalled = PETSC_FALSE;
1087:   PetscFunctionReturn(PETSC_SUCCESS);
1088: }

1090: /*@
1091:   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

1093:   Logically Collective

1095:   Input Parameters:
1096: + ts    - the `TS` context obtained from `TSCreate()`
1097: - steps - number of steps to use

1099:   Level: intermediate

1101:   Notes:
1102:   Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1103:   so as to integrate back over fewer steps than were taken in the forward run

1105: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1106: @*/
1107: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1108: {
1109:   PetscFunctionBegin;
1112:   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1113:   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1114:   ts->adjoint_max_steps = steps;
1115:   PetscFunctionReturn(PETSC_SUCCESS);
1116: }

1118: // PetscClangLinter pragma disable: -fdoc-*
1119: /*@C
1120:   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

1122:   Level: deprecated
1123: @*/
1124: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
1125: {
1126:   PetscFunctionBegin;

1130:   ts->rhsjacobianp    = func;
1131:   ts->rhsjacobianpctx = ctx;
1132:   if (Amat) {
1133:     PetscCall(PetscObjectReference((PetscObject)Amat));
1134:     PetscCall(MatDestroy(&ts->Jacp));
1135:     ts->Jacp = Amat;
1136:   }
1137:   PetscFunctionReturn(PETSC_SUCCESS);
1138: }

1140: // PetscClangLinter pragma disable: -fdoc-*
1141: /*@C
1142:   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

1144:   Level: deprecated
1145: @*/
1146: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1147: {
1148:   PetscFunctionBegin;

1153:   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1154:   PetscFunctionReturn(PETSC_SUCCESS);
1155: }

1157: // PetscClangLinter pragma disable: -fdoc-*
1158: /*@
1159:   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

1161:   Level: deprecated
1162: @*/
1163: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1164: {
1165:   PetscFunctionBegin;

1169:   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1170:   PetscFunctionReturn(PETSC_SUCCESS);
1171: }

1173: // PetscClangLinter pragma disable: -fdoc-*
1174: /*@
1175:   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

1177:   Level: deprecated
1178: @*/
1179: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1180: {
1181:   PetscFunctionBegin;

1185:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1186:   PetscFunctionReturn(PETSC_SUCCESS);
1187: }

1189: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1190: /*@C
1191:   TSAdjointMonitorSensi - monitors the first lambda sensitivity

1193:   Level: intermediate

1195: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1196: @*/
1197: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1198: {
1199:   PetscViewer viewer = vf->viewer;

1201:   PetscFunctionBegin;
1203:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1204:   PetscCall(VecView(lambda[0], viewer));
1205:   PetscCall(PetscViewerPopFormat(viewer));
1206:   PetscFunctionReturn(PETSC_SUCCESS);
1207: }

1209: /*@C
1210:   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

1212:   Collective

1214:   Input Parameters:
1215: + ts           - `TS` object you wish to monitor
1216: . name         - the monitor type one is seeking
1217: . help         - message indicating what monitoring is done
1218: . manual       - manual page for the monitor
1219: . monitor      - the monitor function
1220: - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects

1222:   Level: developer

1224: .seealso: [](ch_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1225:           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1226:           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1227:           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1228:           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1229:           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1230:           `PetscOptionsFList()`, `PetscOptionsEList()`
1231: @*/
1232: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
1233: {
1234:   PetscViewer       viewer;
1235:   PetscViewerFormat format;
1236:   PetscBool         flg;

1238:   PetscFunctionBegin;
1239:   PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1240:   if (flg) {
1241:     PetscViewerAndFormat *vf;
1242:     PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1243:     PetscCall(PetscObjectDereference((PetscObject)viewer));
1244:     if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1245:     PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy));
1246:   }
1247:   PetscFunctionReturn(PETSC_SUCCESS);
1248: }

1250: /*@C
1251:   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1252:   timestep to display the iteration's progress.

1254:   Logically Collective

1256:   Input Parameters:
1257: + ts              - the `TS` context obtained from `TSCreate()`
1258: . adjointmonitor  - monitoring routine
1259: . adjointmctx     - [optional] user-defined context for private data for the monitor routine
1260:                     (use `NULL` if no context is desired)
1261: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`)

1263:   Calling sequence of `adjointmonitor`:
1264: + ts          - the `TS` context
1265: . steps       - iteration number (after the final time step the monitor routine is called with
1266:                 a step of -1, this is at the final time which may have been interpolated to)
1267: . time        - current time
1268: . u           - current iterate
1269: . numcost     - number of cost functions
1270: . lambda      - sensitivities to initial conditions
1271: . mu          - sensitivities to parameters
1272: - adjointmctx - [optional] adjoint monitoring context

1274:   Calling sequence of `adjointmdestroy`:
1275: . mctx - the monitor context to destroy

1277:   Level: intermediate

1279:   Note:
1280:   This routine adds an additional monitor to the list of monitors that
1281:   already has been loaded.

1283:   Fortran Notes:
1284:   Only a single monitor function can be set for each `TS` object

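  Example usage:
  A minimal sketch of a custom adjoint monitor (hypothetical user code) that prints the norm of the first sensitivity at each adjoint step:
.vb
  static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *mctx)
  {
    PetscReal norm;

    PetscFunctionBeginUser;
    PetscCall(VecNorm(lambda[0], NORM_2, &norm));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ts), "%" PetscInt_FMT " TS adjoint ||lambda_0|| %g\n", steps, (double)norm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));
.ve
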
1286: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`
1287: @*/
1288: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **mctx))
1289: {
1290:   PetscInt  i;
1291:   PetscBool identical;

1293:   PetscFunctionBegin;
1295:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1296:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1297:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1298:   }
1299:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1300:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1301:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1302:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx;
1303:   PetscFunctionReturn(PETSC_SUCCESS);
1304: }

1306: /*@C
1307:   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1309:   Logically Collective

1311:   Input Parameter:
1312: . ts - the `TS` context obtained from `TSCreate()`

1314:   Notes:
1315:   There is no way to remove a single, specific monitor.

1317:   Level: intermediate

1319: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1320: @*/
1321: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1322: {
1323:   PetscInt i;

1325:   PetscFunctionBegin;
1327:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1328:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1329:   }
1330:   ts->numberadjointmonitors = 0;
1331:   PetscFunctionReturn(PETSC_SUCCESS);
1332: }

1334: /*@C
1335:   TSAdjointMonitorDefault - the default monitor of adjoint computations

1337:   Input Parameters:
1338: + ts      - the `TS` context
1339: . step    - iteration number (after the final time step the monitor routine is called with a
1340:             step of -1, this is at the final time which may have been interpolated to)
1341: . time    - current time
1342: . v       - current iterate
1343: . numcost - number of cost functions
1344: . lambda  - sensitivities to initial conditions
1345: . mu      - sensitivities to parameters
1346: - vf      - the viewer and format

1348:   Level: intermediate

1350: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1351: @*/
1352: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1353: {
1354:   PetscViewer viewer = vf->viewer;

1356:   PetscFunctionBegin;
1357:   (void)v;
1358:   (void)numcost;
1359:   (void)lambda;
1360:   (void)mu;
1362:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1363:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1364:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1365:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1366:   PetscCall(PetscViewerPopFormat(viewer));
1367:   PetscFunctionReturn(PETSC_SUCCESS);
1368: }

1370: /*@C
1371:   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1372:   `VecView()` for the sensitivities to initial states at each timestep

1374:   Collective

1376:   Input Parameters:
1377: + ts      - the `TS` context
1378: . step    - current time-step
1379: . ptime   - current time
1380: . u       - current state
1381: . numcost - number of cost functions
1382: . lambda  - sensitivities to initial conditions
1383: . mu      - sensitivities to parameters
1384: - dummy   - either a viewer or `NULL`

1386:   Level: intermediate

1388: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1389: @*/
1390: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy)
1391: {
1392:   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1393:   PetscDraw        draw;
1394:   PetscReal        xl, yl, xr, yr, h;
1395:   char             time[32];

1397:   PetscFunctionBegin;
1398:   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

1400:   PetscCall(VecView(lambda[0], ictx->viewer));
1401:   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1402:   PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime));
1403:   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1404:   h = yl + .95 * (yr - yl);
1405:   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1406:   PetscCall(PetscDrawFlush(draw));
1407:   PetscFunctionReturn(PETSC_SUCCESS);
1408: }

1410: /*@C
1411:   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from the options database.

1413:   Collective

1415:   Input Parameters:
1416: + ts                 - the `TS` context
1417: - PetscOptionsObject - the options context

1419:   Options Database Keys:
1420: + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE, solve the adjoint problem (requires `-ts_save_trajectory`)
1421: . -ts_adjoint_monitor            - print information at each adjoint time step
1422: - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function w.r.t. the initial conditions (lambda[0]) graphically

1424:   Level: developer

1426:   Note:
1427:   This is not normally called directly by users.
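
  For example, a typical run enabling the adjoint solve and its monitors from the command line might look like (the executable name is illustrative):
.vb
  ./app -ts_adjoint_solve -ts_adjoint_monitor -ts_adjoint_monitor_draw_sensi
.ve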

1429: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1430: @*/
1431: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject)
1432: {
1433:   PetscBool tflg, opt;

1435:   PetscFunctionBegin;
1437:   PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1438:   tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1439:   PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1440:   if (opt) {
1441:     PetscCall(TSSetSaveTrajectory(ts));
1442:     ts->adjoint_solve = tflg;
1443:   }
1444:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1445:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1446:   opt = PETSC_FALSE;
1447:   PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1448:   if (opt) {
1449:     TSMonitorDrawCtx ctx;
1450:     PetscInt         howoften = 1;

1452:     PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1453:     PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1454:     PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void **))TSMonitorDrawCtxDestroy));
1455:   }
1456:   PetscFunctionReturn(PETSC_SUCCESS);
1457: }

1459: /*@
1460:   TSAdjointStep - Steps one time step backward in the adjoint run

1462:   Collective

1464:   Input Parameter:
1465: . ts - the `TS` context obtained from `TSCreate()`

1467:   Level: intermediate

1469: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1470: @*/
1471: PetscErrorCode TSAdjointStep(TS ts)
1472: {
1473:   DM dm;

1475:   PetscFunctionBegin;
1477:   PetscCall(TSGetDM(ts, &dm));
1478:   PetscCall(TSAdjointSetUp(ts));
1479:   ts->steps--; /* must decrease the step index before the adjoint step is taken. */

1481:   ts->reason     = TS_CONVERGED_ITERATING;
1482:   ts->ptime_prev = ts->ptime;
1483:   PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1484:   PetscUseTypeMethod(ts, adjointstep);
1485:   PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1486:   ts->adjoint_steps++;

1488:   if (ts->reason < 0) {
1489:     PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1490:   } else if (!ts->reason) {
1491:     if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1492:   }
1493:   PetscFunctionReturn(PETSC_SUCCESS);
1494: }

1496: /*@
1497:   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

1499:   Collective

1502:   Input Parameter:
1503: . ts - the `TS` context obtained from `TSCreate()`

1505:   Options Database Key:
1506: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

1508:   Level: intermediate

1510:   Notes:
1511:   This must be called after a call to `TSSolve()` that solves the forward problem

1513:   By default this will integrate back to the initial time; one can use `TSAdjointSetSteps()` to step back only part of the way and stop at a later time
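
  Example usage:
  A minimal sketch of the usual adjoint workflow (`U`, `lambda`, `mu`, and `numcost` are user-provided; the cost-gradient vectors must be seeded with the derivatives of the cost functions at the final time before the adjoint run):
.vb
  PetscCall(TSSetSaveTrajectory(ts));                      // needed so the adjoint can replay the forward trajectory
  PetscCall(TSSolve(ts, U));                               // forward solve
  // seed lambda[i] = dPhi_i/dy and mu[i] = dPhi_i/dp at the final time, then
  PetscCall(TSSetCostGradients(ts, numcost, lambda, mu));
  PetscCall(TSAdjointSolve(ts));                           // lambda and mu now hold the gradients at the initial time
.ve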

1515: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1516: @*/
1517: PetscErrorCode TSAdjointSolve(TS ts)
1518: {
1519:   static PetscBool cite = PETSC_FALSE;
1520: #if defined(TSADJOINT_STAGE)
1521:   PetscLogStage adjoint_stage;
1522: #endif

1524:   PetscFunctionBegin;
1526:   PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1527:                                    "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1528:                                    "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1529:                                    "  journal       = {SIAM Journal on Scientific Computing},\n"
1530:                                    "  volume        = {44},\n"
1531:                                    "  number        = {1},\n"
1532:                                    "  pages         = {C1-C24},\n"
1533:                                    "  doi           = {10.1137/21M140078X},\n"
1534:                                    "  year          = {2022}\n}\n",
1535:                                    &cite));
1536: #if defined(TSADJOINT_STAGE)
1537:   PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1538:   PetscCall(PetscLogStagePush(adjoint_stage));
1539: #endif
1540:   PetscCall(TSAdjointSetUp(ts));

1542:   /* reset time step and iteration counters */
1543:   ts->adjoint_steps     = 0;
1544:   ts->ksp_its           = 0;
1545:   ts->snes_its          = 0;
1546:   ts->num_snes_failures = 0;
1547:   ts->reject            = 0;
1548:   ts->reason            = TS_CONVERGED_ITERATING;

1550:   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1551:   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

1553:   while (!ts->reason) {
1554:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1555:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1556:     PetscCall(TSAdjointEventHandler(ts));
1557:     PetscCall(TSAdjointStep(ts));
1558:     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1559:   }
1560:   if (!ts->steps) {
1561:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1562:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1563:   }
1564:   ts->solvetime = ts->ptime;
1565:   PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1566:   PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1567:   ts->adjoint_max_steps = 0;
1568: #if defined(TSADJOINT_STAGE)
1569:   PetscCall(PetscLogStagePop());
1570: #endif
1571:   PetscFunctionReturn(PETSC_SUCCESS);
1572: }

1574: /*@C
1575:   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1577:   Collective

1579:   Input Parameters:
1580: + ts      - time stepping context obtained from `TSCreate()`
1581: . step    - step number that has just completed
1582: . ptime   - model time of the state
1583: . u       - state at the current model time
1584: . numcost - number of cost functions (dimension of lambda or mu)
1585: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1586: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

1588:   Level: developer

1590:   Note:
1591:   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1592:   Users would almost never call this routine directly.
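
  A user-provided monitor registered with `TSAdjointMonitorSet()` follows the same calling sequence as the monitors in this file; a minimal sketch (the name `MyAdjointMonitor` is illustrative):
.vb
  static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *ctx)
  {
    PetscFunctionBegin;
    // inspect lambda[0 .. numcost-1] and, if parameters are involved, mu[0 .. numcost-1]
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));
.ve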

1594: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1595: @*/
1596: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
1597: {
1598:   PetscInt i, n = ts->numberadjointmonitors;

1600:   PetscFunctionBegin;
1603:   PetscCall(VecLockReadPush(u));
1604:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1605:   PetscCall(VecLockReadPop(u));
1606:   PetscFunctionReturn(PETSC_SUCCESS);
1607: }

1609: /*@
1610:   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

1612:   Collective

1614:   Input Parameter:
1615: . ts - time stepping context

1617:   Level: advanced

1619:   Notes:
1620:   This function cannot be called until `TSAdjointStep()` has been completed.

1622: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1623:  @*/
1624: PetscErrorCode TSAdjointCostIntegral(TS ts)
1625: {
1626:   PetscFunctionBegin;
1628:   PetscUseTypeMethod(ts, adjointintegral);
1629:   PetscFunctionReturn(PETSC_SUCCESS);
1630: }

1632: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1634: /*@
1635:   TSForwardSetUp - Sets up the internal data structures for the later use
1636:   of forward sensitivity analysis

1638:   Collective

1640:   Input Parameter:
1641: . ts - the `TS` context obtained from `TSCreate()`

1643:   Level: advanced

1645: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1646: @*/
1647: PetscErrorCode TSForwardSetUp(TS ts)
1648: {
1649:   PetscFunctionBegin;
1651:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1652:   PetscTryTypeMethod(ts, forwardsetup);
1653:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1654:   ts->forwardsetupcalled = PETSC_TRUE;
1655:   PetscFunctionReturn(PETSC_SUCCESS);
1656: }

1658: /*@
1659:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1661:   Collective

1663:   Input Parameter:
1664: . ts - the `TS` context obtained from `TSCreate()`

1666:   Level: advanced

1668: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1669: @*/
1670: PetscErrorCode TSForwardReset(TS ts)
1671: {
1672:   TS quadts = ts->quadraturets;

1674:   PetscFunctionBegin;
1676:   PetscTryTypeMethod(ts, forwardreset);
1677:   PetscCall(MatDestroy(&ts->mat_sensip));
1678:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1679:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1680:   ts->forward_solve      = PETSC_FALSE;
1681:   ts->forwardsetupcalled = PETSC_FALSE;
1682:   PetscFunctionReturn(PETSC_SUCCESS);
1683: }

1685: /*@
1686:   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

1688:   Input Parameters:
1689: + ts        - the `TS` context obtained from `TSCreate()`
1690: . numfwdint - number of integrals
1691: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1693:   Level: deprecated

1695: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1696: @*/
1697: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp)
1698: {
1699:   PetscFunctionBegin;
1701:   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of integrals (2nd parameter of TSForwardSetIntegralGradients()) is inconsistent with the number of cost functions set by TSSetCostIntegrand()");
1702:   if (!ts->numcost) ts->numcost = numfwdint;

1704:   ts->vecs_integral_sensip = vp;
1705:   PetscFunctionReturn(PETSC_SUCCESS);
1706: }

1708: /*@
1709:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1711:   Input Parameter:
1712: . ts - the `TS` context obtained from `TSCreate()`

1714:   Output Parameters:
1715: + numfwdint - number of integrals
1716: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1718:   Level: deprecated

1720: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1721: @*/
1722: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp)
1723: {
1724:   PetscFunctionBegin;
1726:   PetscAssertPointer(vp, 3);
1727:   if (numfwdint) *numfwdint = ts->numcost;
1728:   if (vp) *vp = ts->vecs_integral_sensip;
1729:   PetscFunctionReturn(PETSC_SUCCESS);
1730: }

1732: /*@
1733:   TSForwardStep - Compute the forward sensitivity for one time step.

1735:   Collective

1737:   Input Parameter:
1738: . ts - time stepping context

1740:   Level: advanced

1742:   Notes:
1743:   This function cannot be called until `TSStep()` has been completed.

1745: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1746: @*/
1747: PetscErrorCode TSForwardStep(TS ts)
1748: {
1749:   PetscFunctionBegin;
1751:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1752:   PetscUseTypeMethod(ts, forwardstep);
1753:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1754:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSForwardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1755:   PetscFunctionReturn(PETSC_SUCCESS);
1756: }

1758: /*@
1759:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of the solution w.r.t. the problem parameters and initial values.

1761:   Logically Collective

1763:   Input Parameters:
1764: + ts   - the `TS` context obtained from `TSCreate()`
1765: . nump - number of parameters
1766: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1768:   Level: beginner

1770:   Notes:
1771:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1772:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1773:   You must call this function before `TSSolve()`.
1774:   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.
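
  Example usage:
  A minimal sketch for `np` parameters and a state vector with `n` local entries (the names are illustrative):
.vb
  Mat Smat;
  PetscCall(MatCreateDense(PETSC_COMM_WORLD, n, PETSC_DECIDE, PETSC_DETERMINE, np, NULL, &Smat));
  // fill Smat with dy/dp at the initial time (all zeros if the initial state does not depend on p)
  PetscCall(TSForwardSetSensitivities(ts, np, Smat));
  PetscCall(TSSolve(ts, U));  // Smat now holds dy/dp at the final time
.ve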

1776: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1777: @*/
1778: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1779: {
1780:   PetscFunctionBegin;
1783:   ts->forward_solve = PETSC_TRUE;
1784:   if (nump == PETSC_DEFAULT) {
1785:     PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1786:   } else ts->num_parameters = nump;
1787:   PetscCall(PetscObjectReference((PetscObject)Smat));
1788:   PetscCall(MatDestroy(&ts->mat_sensip));
1789:   ts->mat_sensip = Smat;
1790:   PetscFunctionReturn(PETSC_SUCCESS);
1791: }

1793: /*@
1794:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1796:   Not Collective, but the `Smat` returned is parallel if `ts` is parallel

1798:   Input Parameter:
1799: . ts   - the `TS` context obtained from `TSCreate()`

1800:   Output Parameters:
1801: + nump - number of parameters
1802: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1803:   Level: intermediate

1805: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1806: @*/
1807: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1808: {
1809:   PetscFunctionBegin;
1811:   if (nump) *nump = ts->num_parameters;
1812:   if (Smat) *Smat = ts->mat_sensip;
1813:   PetscFunctionReturn(PETSC_SUCCESS);
1814: }

1816: /*@
1817:   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

1819:   Collective

1821:   Input Parameter:
1822: . ts - time stepping context

1824:   Level: advanced

1826:   Note:
1827:   This function cannot be called until `TSStep()` has been completed.

1829: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1830: @*/
1831: PetscErrorCode TSForwardCostIntegral(TS ts)
1832: {
1833:   PetscFunctionBegin;
1835:   PetscUseTypeMethod(ts, forwardintegral);
1836:   PetscFunctionReturn(PETSC_SUCCESS);
1837: }

1839: /*@
1840:   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

1842:   Collective

1844:   Input Parameters:
1845: + ts   - the `TS` context obtained from `TSCreate()`
1846: - didp - parametric sensitivities of the initial condition

1848:   Level: intermediate

1850:   Notes:
1851:   `TSSolve()` allows users to pass the initial solution directly to `TS`, but the tangent linear variables cannot be initialized in this way.
1852:   This function is used to set initial values for tangent linear variables.
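
  A minimal sketch (`didp` is a user-created dense matrix holding dy0/dp):
.vb
  PetscCall(TSForwardSetInitialSensitivities(ts, didp));
  PetscCall(TSSolve(ts, U));
.ve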

1854: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1855: @*/
1856: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1857: {
1858:   PetscFunctionBegin;
1861:   if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp));
1862:   PetscFunctionReturn(PETSC_SUCCESS);
1863: }

1865: /*@
1866:   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1868:   Input Parameter:
1869: . ts - the `TS` context obtained from `TSCreate()`

1871:   Output Parameters:
1872: + ns - number of stages
1873: - S  - tangent linear sensitivities at the intermediate stages

1875:   Level: advanced

1877: .seealso: `TS`
1878: @*/
1879: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1880: {
1881:   PetscFunctionBegin;

1884:   if (!ts->ops->forwardgetstages) *S = NULL;
1885:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1886:   PetscFunctionReturn(PETSC_SUCCESS);
1887: }

1889: /*@
1890:   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1892:   Input Parameters:
1893: + ts  - the `TS` context obtained from `TSCreate()`
1894: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1896:   Output Parameter:
1897: . quadts - the child `TS` context
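
  Example usage:
  A sketch of attaching a cost integrand to the quadrature sub-`TS` (`CostIntegrand` and `user` are hypothetical user-provided names; the integrand has the standard `TSRHSFunction` calling sequence and returns one value per cost function):
.vb
  TS quadts;
  PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));
  PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));
.ve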

1899:   Level: intermediate

1901: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1902: @*/
1903: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1904: {
1905:   char prefix[128];

1907:   PetscFunctionBegin;
1909:   PetscAssertPointer(quadts, 3);
1910:   PetscCall(TSDestroy(&ts->quadraturets));
1911:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1912:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1913:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1914:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1915:   *quadts = ts->quadraturets;

1917:   if (ts->numcost) {
1918:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1919:   } else {
1920:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1921:   }
1922:   ts->costintegralfwd = fwd;
1923:   PetscFunctionReturn(PETSC_SUCCESS);
1924: }

1926: /*@
1927:   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1929:   Input Parameter:
1930: . ts - the `TS` context obtained from `TSCreate()`

1932:   Output Parameters:
1933: + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1934: - quadts - the child `TS` context
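
  Example usage:
  A sketch of retrieving the accumulated integral values after `TSSolve()` (one entry per cost function):
.vb
  TS  quadts;
  Vec q;
  PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
  PetscCall(TSGetSolution(quadts, &q));
.ve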

1936:   Level: intermediate

1938: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1939: @*/
1940: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1941: {
1942:   PetscFunctionBegin;
1944:   if (fwd) *fwd = ts->costintegralfwd;
1945:   if (quadts) *quadts = ts->quadraturets;
1946:   PetscFunctionReturn(PETSC_SUCCESS);
1947: }

1949: /*@
1950:   TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

1952:   Collective

1954:   Input Parameters:
1955: + ts - the `TS` context obtained from `TSCreate()`
1956: - x  - state vector

1958:   Output Parameters:
1959: + J    - Jacobian matrix
1960: - Jpre - preconditioning matrix for J (may be the same as J)

1962:   Level: developer

1964:   Note:
1965:   Uses finite differencing when `TS` Jacobian is not available.

1967: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
1968: @*/
1969: PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
1970: {
1971:   SNES snes                                          = ts->snes;
1972:   PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

1974:   PetscFunctionBegin;
1975:   /*
1976:     Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
1977:     because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
1978:     explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
1979:     coloring is used.
1980:   */
1981:   PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
1982:   if (jac == SNESComputeJacobianDefaultColor) {
1983:     Vec f;
1984:     PetscCall(SNESSetSolution(snes, x));
1985:     PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
1986:     /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
1987:     PetscCall(SNESComputeFunction(snes, x, f));
1988:   }
1989:   PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
1990:   PetscFunctionReturn(PETSC_SUCCESS);
1991: }