Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts   - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function
 19: - ctx  - [optional] user-defined function context

 21:   Level: intermediate

 23:   Note:
 24:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.
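
  Example Usage:
  A minimal sketch of supplying the parameter Jacobian for $U_t = G(U,p,t)$; the callback name
  `RHSJacobianP_User`, the sizes `nstate` and `np`, and the context `user` are illustrative and not
  part of this API.
.vb
  static PetscErrorCode RHSJacobianP_User(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
  {
    PetscFunctionBeginUser;
    /* fill A(i,k) = dG_i/dp_k at (t, U), e.g. with MatSetValues(), then assemble */
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Mat Jacp;
  PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, nstate, np, NULL, &Jacp));
  PetscCall(TSSetRHSJacobianP(ts, Jacp, RHSJacobianP_User, &user));
.ve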

 26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
 27: @*/
 28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, void *ctx)
 29: {
 30:   PetscFunctionBegin;

 34:   ts->rhsjacobianp    = func;
 35:   ts->rhsjacobianpctx = ctx;
 36:   if (Amat) {
 37:     PetscCall(PetscObjectReference((PetscObject)Amat));
 38:     PetscCall(MatDestroy(&ts->Jacprhs));
 39:     ts->Jacprhs = Amat;
 40:   }
 41:   PetscFunctionReturn(PETSC_SUCCESS);
 42: }

 44: /*@C
 45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location where the matrix is stored.

 47:   Logically Collective

 49:   Input Parameter:
 50: . ts - `TS` context obtained from `TSCreate()`

 52:   Output Parameters:
 53: + Amat - JacobianP matrix
 54: . func - function
 55: - ctx  - [optional] user-defined function context

 57:   Level: intermediate

 59:   Note:
 60:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.

 62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
 63: @*/
 64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, void **ctx)
 65: {
 66:   PetscFunctionBegin;
 67:   if (func) *func = ts->rhsjacobianp;
 68:   if (ctx) *ctx = ts->rhsjacobianpctx;
 69:   if (Amat) *Amat = ts->Jacprhs;
 70:   PetscFunctionReturn(PETSC_SUCCESS);
 71: }

 73: /*@C
 74:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 76:   Collective

 78:   Input Parameters:
 79: + ts - The `TS` context obtained from `TSCreate()`
 80: . t  - the time
 81: - U  - the solution at which to compute the Jacobian

 83:   Output Parameter:
 84: . Amat - the computed Jacobian

 86:   Level: developer

 88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
 89: @*/
 90: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
 91: {
 92:   PetscFunctionBegin;
 93:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

 97:   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
 98:   else {
 99:     PetscBool assembled;
100:     PetscCall(MatZeroEntries(Amat));
101:     PetscCall(MatAssembled(Amat, &assembled));
102:     if (!assembled) {
103:       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104:       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105:     }
106:   }
107:   PetscFunctionReturn(PETSC_SUCCESS);
108: }

110: /*@C
111:   TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.

113:   Logically Collective

115:   Input Parameters:
116: + ts   - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function
119: - ctx  - [optional] user-defined function context

121:   Calling sequence of `func`:
122: + ts    - the `TS` context
123: . t     - current time
124: . U     - input vector (current ODE solution)
125: . Udot  - time derivative of state vector
126: . shift - shift to apply, see the note in `TSSetIJacobian()`
127: . A     - output matrix
128: - ctx   - [optional] user-defined function context

130:   Level: intermediate

132:   Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.
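
  Example Usage:
  A minimal sketch, assuming the parameters enter $F$ only through terms that do not involve `Udot`;
  the names `IJacobianP_User`, `Jacp`, and `user` are illustrative.
.vb
  static PetscErrorCode IJacobianP_User(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
  {
    PetscFunctionBeginUser;
    /* fill A(i,k) = dF_i/dp_k at (t, U, Udot), then assemble */
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSSetIJacobianP(ts, Jacp, IJacobianP_User, &user));
.ve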

135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx)
138: {
139:   PetscFunctionBegin;

143:   ts->ijacobianp    = func;
144:   ts->ijacobianpctx = ctx;
145:   if (Amat) {
146:     PetscCall(PetscObjectReference((PetscObject)Amat));
147:     PetscCall(MatDestroy(&ts->Jacp));
148:     ts->Jacp = Amat;
149:   }
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@C
154:   TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location where the matrix is stored.

156:   Logically Collective

158:   Input Parameter:
159: . ts - `TS` context obtained from `TSCreate()`

161:   Output Parameters:
162: + Amat - JacobianP matrix
163: . func - the function that computes the JacobianP
164: - ctx  - [optional] user-defined function context

166:   Calling sequence of `func`:
167: + ts    - the `TS` context
168: . t     - current time
169: . U     - input vector (current ODE solution)
170: . Udot  - time derivative of state vector
171: . shift - shift to apply, see the note in `TSSetIJacobian()`
172: . A     - output matrix
173: - ctx   - [optional] user-defined function context

175:   Level: intermediate

177:   Note:
178:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`.

180: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181: @*/
182: PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void **ctx)
183: {
184:   PetscFunctionBegin;

187:   if (func) *func = ts->ijacobianp;
188:   if (ctx) *ctx = ts->ijacobianpctx;
189:   if (Amat) *Amat = ts->Jacp;
190:   PetscFunctionReturn(PETSC_SUCCESS);
191: }

193: /*@
194:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

196:   Collective

198:   Input Parameters:
199: + ts    - the `TS` context
200: . t     - current time
201: . U     - state vector
202: . Udot  - time derivative of state vector
203: . shift - shift to apply, see the note in `TSSetIJacobian()`
204: - imex  - flag indicating whether the method is IMEX, in which case the `RHSJacobianP` is kept separate

206:   Output Parameter:
207: . Amat - Jacobian matrix

209:   Level: developer

211: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212: @*/
213: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
214: {
215:   PetscFunctionBegin;
216:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

221:   PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
222:   if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
223:   if (imex) {
224:     if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
225:       PetscBool assembled;
226:       PetscCall(MatZeroEntries(Amat));
227:       PetscCall(MatAssembled(Amat, &assembled));
228:       if (!assembled) {
229:         PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
230:         PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
231:       }
232:     }
233:   } else {
234:     if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
235:     if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
236:       PetscCall(MatScale(Amat, -1));
237:     } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
238:       MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
239:       if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
240:         PetscCall(MatZeroEntries(Amat));
241:       }
242:       PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
243:     }
244:   }
245:   PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
246:   PetscFunctionReturn(PETSC_SUCCESS);
247: }

249: /*@C
250:   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

252:   Logically Collective

254:   Input Parameters:
255: + ts           - the `TS` context obtained from `TSCreate()`
256: . numcost      - number of gradients to be computed, this is the number of cost functions
257: . costintegral - vector that stores the integral values
258: . rf           - routine for evaluating the integrand function
259: . drduf        - function that computes the gradients of `r` with respect to `u`
260: . drdpf        - function that computes the gradients of `r` with respect to `p`; can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261: . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262: - ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)

264:   Calling sequence of `rf`:
265: + ts  - the integrator
266: . t   - the time
267: . U   - the solution
268: . F   - the computed value of the function
269: - ctx - the user context

271:   Calling sequence of `drduf`:
272: + ts   - the integrator
273: . t    - the time
274: . U    - the solution
275: . dRdU - the computed gradients of `r` with respect to `u`
276: - ctx  - the user context

278:   Calling sequence of `drdpf`:
279: + ts   - the integrator
280: . t    - the time
281: . U    - the solution
282: . dRdP - the computed gradients of `r` with respect to `p`
283: - ctx  - the user context

285:   Level: deprecated

287:   Notes:
288:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions.

290:   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead
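
  Example Usage:
  A sketch of the recommended replacement, using a quadrature `TS`; the callbacks `CostIntegrand`,
  `DRDUJacobianT`, and `DRDPJacobianT`, the matrices `DRDU` and `DRDP`, and the context `user` are
  illustrative names to be supplied by the user.
.vb
  TS quadts;
  PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));               /* PETSC_TRUE: evaluate the integral in the forward run */
  PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));        /* the integrand r(U,p,t) */
  PetscCall(TSSetRHSJacobian(quadts, DRDU, DRDU, DRDUJacobianT, &user));  /* dr/du, needed by TSAdjointSolve() */
  PetscCall(TSSetRHSJacobianP(quadts, DRDP, DRDPJacobianT, &user));       /* dr/dp, needed for parameter sensitivities */
.ve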

292: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293:           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294: @*/
295: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
296: {
297:   PetscFunctionBegin;
300:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
301:   if (!ts->numcost) ts->numcost = numcost;

303:   if (costintegral) {
304:     PetscCall(PetscObjectReference((PetscObject)costintegral));
305:     PetscCall(VecDestroy(&ts->vec_costintegral));
306:     ts->vec_costintegral = costintegral;
307:   } else {
308:     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
309:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
310:     } else {
311:       PetscCall(VecSet(ts->vec_costintegral, 0.0));
312:     }
313:   }
314:   if (!ts->vec_costintegrand) {
315:     PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
316:   } else {
317:     PetscCall(VecSet(ts->vec_costintegrand, 0.0));
318:   }
319:   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
320:   ts->costintegrand    = rf;
321:   ts->costintegrandctx = ctx;
322:   ts->drdufunction     = drduf;
323:   ts->drdpfunction     = drdpf;
324:   PetscFunctionReturn(PETSC_SUCCESS);
325: }

327: /*@
328:   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329:   It is valid to call the routine after a backward run.

331:   Not Collective

333:   Input Parameter:
334: . ts - the `TS` context obtained from `TSCreate()`

336:   Output Parameter:
337: . v - the vector containing the integrals for each cost function

339:   Level: intermediate
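
  Example Usage:
  A minimal sketch of inspecting the integral terms after the backward run.
.vb
  Vec q;
  PetscCall(TSAdjointSolve(ts));
  PetscCall(TSGetCostIntegral(ts, &q));
  PetscCall(VecView(q, PETSC_VIEWER_STDOUT_WORLD));   /* q is owned by the TS, do not destroy it */
.ve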

341: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342: @*/
343: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344: {
345:   TS quadts;

347:   PetscFunctionBegin;
349:   PetscAssertPointer(v, 2);
350:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351:   *v = quadts->vec_sol;
352:   PetscFunctionReturn(PETSC_SUCCESS);
353: }

355: /*@
356:   TSComputeCostIntegrand - Evaluates the integrand of the cost functions.

358:   Input Parameters:
359: + ts - the `TS` context
360: . t  - current time
361: - U  - state vector, i.e. current solution

363:   Output Parameter:
364: . Q - vector of size numcost to hold the outputs

366:   Level: deprecated

368:   Note:
369:   Most users should not need to explicitly call this routine, as it
370:   is used internally within the sensitivity analysis context.

372: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373: @*/
374: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375: {
376:   PetscFunctionBegin;

381:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383:   else PetscCall(VecZeroEntries(Q));
384:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385:   PetscFunctionReturn(PETSC_SUCCESS);
386: }

388: // PetscClangLinter pragma disable: -fdoc-*
389: /*@
390:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

392:   Level: deprecated

394: @*/
395: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
396: {
397:   PetscFunctionBegin;
398:   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

402:   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
403:   PetscFunctionReturn(PETSC_SUCCESS);
404: }

406: // PetscClangLinter pragma disable: -fdoc-*
407: /*@
408:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

410:   Level: deprecated

412: @*/
413: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
414: {
415:   PetscFunctionBegin;
416:   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

420:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
421:   PetscFunctionReturn(PETSC_SUCCESS);
422: }

424: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426: /*@C
427:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

429:   Logically Collective

431:   Input Parameters:
432: + ts   - `TS` context obtained from `TSCreate()`
433: . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
434: . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
435: . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
436: . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
437: . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
438: . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
439: . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
440: - hessianproductfunc4 - vector-Hessian-vector product function for F_PP

442:   Calling sequence of `ihessianproductfunc1`:
443: + ts  - the `TS` context
444: . t   - current time
445: . U   - input vector (current ODE solution)
446: . Vl  - an array of input vectors to be left-multiplied with the Hessian
447: . Vr  - input vector to be right-multiplied with the Hessian
448: . VHV - an array of output vectors for vector-Hessian-vector product
449: - ctx - [optional] user-defined function context

451:   Level: intermediate

453:   Notes:
454:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
455:   descriptions are omitted for brevity.

457:   The first Hessian function and the working array are required.
458:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
459:   $ Vl_n^T*F_UP*Vr
460:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
461:   Each entry of F_UP corresponds to the derivative
462:   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
463:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
464:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
465:   If the cost function is a scalar, there will be only one vector in Vl and VHV.
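
  Example Usage:
  A minimal sketch that provides only the required F_UU term, for the case of a single cost function
  and an IFunction that is linear in U (so the product vanishes); the names `IHessianProductUU_User`,
  `ihp`, and `user` are illustrative.
.vb
  static PetscErrorCode IHessianProductUU_User(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(VecZeroEntries(VHV[0]));   /* one cost function, so only VHV[0] is used */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Vec ihp[1], U;
  PetscCall(TSGetSolution(ts, &U));
  PetscCall(VecDuplicate(U, &ihp[0]));   /* working vector, same layout as the state */
  PetscCall(TSSetIHessianProduct(ts, ihp, IHessianProductUU_User, NULL, NULL, NULL, NULL, NULL, NULL, &user));
.ve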

467: .seealso: [](ch_ts), `TS`
468: @*/
469: PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
470: {
471:   PetscFunctionBegin;
473:   PetscAssertPointer(ihp1, 2);

475:   ts->ihessianproductctx = ctx;
476:   if (ihp1) ts->vecs_fuu = ihp1;
477:   if (ihp2) ts->vecs_fup = ihp2;
478:   if (ihp3) ts->vecs_fpu = ihp3;
479:   if (ihp4) ts->vecs_fpp = ihp4;
480:   ts->ihessianproduct_fuu = ihessianproductfunc1;
481:   ts->ihessianproduct_fup = ihessianproductfunc2;
482:   ts->ihessianproduct_fpu = ihessianproductfunc3;
483:   ts->ihessianproduct_fpp = ihessianproductfunc4;
484:   PetscFunctionReturn(PETSC_SUCCESS);
485: }

487: /*@
488:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

490:   Collective

492:   Input Parameters:
493: + ts - The `TS` context obtained from `TSCreate()`
494: . t  - the time
495: . U  - the solution at which to compute the Hessian product
496: . Vl - the array of input vectors to be multiplied with the Hessian from the left
497: - Vr - the input vector to be multiplied with the Hessian from the right

499:   Output Parameter:
500: . VHV - the array of output vectors that store the Hessian product

502:   Level: developer

504:   Note:
505:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
506:   so most users would not generally call this routine themselves.

508: .seealso: [](ch_ts), `TSSetIHessianProduct()`
509: @*/
510: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
511: {
512:   PetscFunctionBegin;
513:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

517:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

519:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
520:   if (ts->rhshessianproduct_guu) {
521:     PetscInt nadj;
522:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
523:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
524:   }
525:   PetscFunctionReturn(PETSC_SUCCESS);
526: }

528: /*@
529:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

531:   Collective

533:   Input Parameters:
534: + ts - The `TS` context obtained from `TSCreate()`
535: . t  - the time
536: . U  - the solution at which to compute the Hessian product
537: . Vl - the array of input vectors to be multiplied with the Hessian from the left
538: - Vr - the input vector to be multiplied with the Hessian from the right

540:   Output Parameter:
541: . VHV - the array of output vectors that store the Hessian product

543:   Level: developer

545:   Note:
546:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
547:   so most users would not generally call this routine themselves.

549: .seealso: [](ch_ts), `TSSetIHessianProduct()`
550: @*/
551: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
552: {
553:   PetscFunctionBegin;
554:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

558:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

560:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
561:   if (ts->rhshessianproduct_gup) {
562:     PetscInt nadj;
563:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
564:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
565:   }
566:   PetscFunctionReturn(PETSC_SUCCESS);
567: }

569: /*@
570:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

572:   Collective

574:   Input Parameters:
575: + ts - The `TS` context obtained from `TSCreate()`
576: . t  - the time
577: . U  - the solution at which to compute the Hessian product
578: . Vl - the array of input vectors to be multiplied with the Hessian from the left
579: - Vr - the input vector to be multiplied with the Hessian from the right

581:   Output Parameter:
582: . VHV - the array of output vectors that store the Hessian product

584:   Level: developer

586:   Note:
587:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
588:   so most users would not generally call this routine themselves.

590: .seealso: [](ch_ts), `TSSetIHessianProduct()`
591: @*/
592: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
593: {
594:   PetscFunctionBegin;
595:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

599:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

601:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
602:   if (ts->rhshessianproduct_gpu) {
603:     PetscInt nadj;
604:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
605:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
606:   }
607:   PetscFunctionReturn(PETSC_SUCCESS);
608: }

610: /*@C
611:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

613:   Collective

615:   Input Parameters:
616: + ts - The `TS` context obtained from `TSCreate()`
617: . t  - the time
618: . U  - the solution at which to compute the Hessian product
619: . Vl - the array of input vectors to be multiplied with the Hessian from the left
620: - Vr - the input vector to be multiplied with the Hessian from the right

622:   Output Parameter:
623: . VHV - the array of output vectors that store the Hessian product

625:   Level: developer

627:   Note:
628:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
629:   so most users would not generally call this routine themselves.

631: .seealso: [](ch_ts), `TSSetIHessianProduct()`
632: @*/
633: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
634: {
635:   PetscFunctionBegin;
636:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

640:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

642:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
643:   if (ts->rhshessianproduct_gpp) {
644:     PetscInt nadj;
645:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
646:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
647:   }
648:   PetscFunctionReturn(PETSC_SUCCESS);
649: }

651: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
652: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
653: /*@C
654:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
655:   product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
656:   variable.

658:   Logically Collective

660:   Input Parameters:
661: + ts     - `TS` context obtained from `TSCreate()`
662: . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
663: . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
664: . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
665: . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
666: . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
667: . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
668: . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
669: . hessianproductfunc4 - vector-Hessian-vector product function for G_PP
670: - ctx    - [optional] user-defined function context

672:   Calling sequence of `rhshessianproductfunc1`:
673: + ts  - the `TS` context
674: . t   - current time
675: . U   - input vector (current ODE solution)
676: . Vl  - an array of input vectors to be left-multiplied with the Hessian
677: . Vr  - input vector to be right-multiplied with the Hessian
678: . VHV - an array of output vectors for vector-Hessian-vector product
679: - ctx - [optional] user-defined function context

681:   Level: intermediate

683:   Notes:
684:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
685:   descriptions are omitted for brevity.

687:   The first Hessian function and the working array are required.

689:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
690:   $ Vl_n^T*G_UP*Vr
691:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
692:   Each entry of G_UP corresponds to the derivative
693:   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
694:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
695:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
696:   If the cost function is a scalar, there will be only one vector in Vl and VHV.
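
  Example Usage:
  A sketch for a single-equation ODE $u_t = p u^2$ with one cost function, where G_UU is the constant
  2*p so that VHV_0 = Vl_0 * (2*p) * Vr; the struct `AppCtx` with field `p`, the callback name, and
  the vector `u` are illustrative.
.vb
  static PetscErrorCode RHSHessianProductUU_User(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
  {
    AppCtx *user = (AppCtx *)ctx;

    PetscFunctionBeginUser;
    PetscCall(VecPointwiseMult(VHV[0], Vl[0], Vr));    /* Vl_0[i] * Vr[i] */
    PetscCall(VecScale(VHV[0], 2.0 * user->p));        /* times the constant Hessian 2*p */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Vec rhshp[1];
  PetscCall(VecDuplicate(u, &rhshp[0]));
  PetscCall(TSSetRHSHessianProduct(ts, rhshp, RHSHessianProductUU_User, NULL, NULL, NULL, NULL, NULL, NULL, &user));
.ve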

698: .seealso: `TS`, `TSAdjoint`
699: @*/
700: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
701: {
702:   PetscFunctionBegin;
704:   PetscAssertPointer(rhshp1, 2);

706:   ts->rhshessianproductctx = ctx;
707:   if (rhshp1) ts->vecs_guu = rhshp1;
708:   if (rhshp2) ts->vecs_gup = rhshp2;
709:   if (rhshp3) ts->vecs_gpu = rhshp3;
710:   if (rhshp4) ts->vecs_gpp = rhshp4;
711:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
712:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
713:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
714:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
715:   PetscFunctionReturn(PETSC_SUCCESS);
716: }

718: /*@
719:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

721:   Collective

723:   Input Parameters:
724: + ts - The `TS` context obtained from `TSCreate()`
725: . t  - the time
726: . U  - the solution at which to compute the Hessian product
727: . Vl - the array of input vectors to be multiplied with the Hessian from the left
728: - Vr - the input vector to be multiplied with the Hessian from the right

730:   Output Parameter:
731: . VHV - the array of output vectors that store the Hessian product

733:   Level: developer

735:   Note:
736:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
737:   so most users would not generally call this routine themselves.

739: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
740: @*/
741: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
742: {
743:   PetscFunctionBegin;
744:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

748:   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
749:   PetscFunctionReturn(PETSC_SUCCESS);
750: }

752: /*@
753:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

755:   Collective

757:   Input Parameters:
758: + ts - The `TS` context obtained from `TSCreate()`
759: . t  - the time
760: . U  - the solution at which to compute the Hessian product
761: . Vl - the array of input vectors to be multiplied with the Hessian from the left
762: - Vr - the input vector to be multiplied with the Hessian from the right

764:   Output Parameter:
765: . VHV - the array of output vectors that store the Hessian product

767:   Level: developer

769:   Note:
770:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
771:   so most users would not generally call this routine themselves.

773: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
774: @*/
775: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
776: {
777:   PetscFunctionBegin;
778:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

782:   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
783:   PetscFunctionReturn(PETSC_SUCCESS);
784: }

786: /*@
787:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

789:   Collective

791:   Input Parameters:
792: + ts - The `TS` context obtained from `TSCreate()`
793: . t  - the time
794: . U  - the solution at which to compute the Hessian product
795: . Vl - the array of input vectors to be multiplied with the Hessian from the left
796: - Vr - the input vector to be multiplied with the Hessian from the right

798:   Output Parameter:
799: . VHV - the array of output vectors that store the Hessian product

801:   Level: developer

803:   Note:
804:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
805:   so most users would not generally call this routine themselves.

807: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
808: @*/
809: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
810: {
811:   PetscFunctionBegin;
812:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

816:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
817:   PetscFunctionReturn(PETSC_SUCCESS);
818: }

820: /*@
821:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.

823:   Collective

825:   Input Parameters:
826: + ts - The `TS` context obtained from `TSCreate()`
827: . t  - the time
828: . U  - the solution at which to compute the Hessian product
829: . Vl - the array of input vectors to be multiplied with the Hessian from the left
830: - Vr - the input vector to be multiplied with the Hessian from the right

832:   Output Parameter:
833: . VHV - the array of output vectors that store the Hessian product

835:   Level: developer

837:   Note:
838:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
839:   so most users would not generally call this routine themselves.

841: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
842: @*/
843: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
844: {
845:   PetscFunctionBegin;
846:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

850:   PetscCallBack("TS callback RHSHessianProduct 4 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
851:   PetscFunctionReturn(PETSC_SUCCESS);
852: }

854: /* --------------------------- Adjoint sensitivity ---------------------------*/

856: /*@
857:   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
858:   for use by the `TS` adjoint routines.

860:   Logically Collective

862:   Input Parameters:
863: + ts      - the `TS` context obtained from `TSCreate()`
864: . numcost - number of gradients to be computed, this is the number of cost functions
865: . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors are the same as those of the ODE solution vector
866: - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

868:   Level: beginner

870:   Notes:
871:   The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|_finaltime and mu_i = df/dp|_finaltime

873:   After `TSAdjointSolve()` is called, `lambda` and `mu` contain the computed sensitivities
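
  Example Usage:
  A sketch of the standard first-order adjoint workflow for one cost function; `u` is the solution
  vector, and `np` (the number of parameters) and the terminal-condition values are problem dependent.
.vb
  Vec lambda[1], mu[1];
  PetscCall(VecDuplicate(u, &lambda[0]));                /* same layout as the ODE solution */
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &mu[0]));  /* np = number of parameters */
  PetscCall(TSSetSaveTrajectory(ts));                    /* the adjoint needs the forward trajectory */
  PetscCall(TSSolve(ts, u));
  /* set the terminal conditions lambda = df/dy and mu = df/dp at the final time, then run backward */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSAdjointSolve(ts));                         /* lambda[0] and mu[0] now hold the gradients */
.ve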

875: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
876: @*/
877: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
878: {
879:   PetscFunctionBegin;
881:   PetscAssertPointer(lambda, 3);
882:   ts->vecs_sensi  = lambda;
883:   ts->vecs_sensip = mu;
884:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
885:   ts->numcost = numcost;
886:   PetscFunctionReturn(PETSC_SUCCESS);
887: }

889: /*@
890:   TSGetCostGradients - Returns the gradients from `TSAdjointSolve()`

892:   Not Collective, but the vectors returned are parallel if `TS` is parallel

894:   Input Parameter:
895: . ts - the `TS` context obtained from `TSCreate()`

897:   Output Parameters:
898: + numcost - size of returned arrays
899: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
900: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

902:   Level: intermediate

904: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
905: @*/
906: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
907: {
908:   PetscFunctionBegin;
910:   if (numcost) *numcost = ts->numcost;
911:   if (lambda) *lambda = ts->vecs_sensi;
912:   if (mu) *mu = ts->vecs_sensip;
913:   PetscFunctionReturn(PETSC_SUCCESS);
914: }

916: /*@
917:   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
918:   for use by the `TS` adjoint routines.

920:   Logically Collective

922:   Input Parameters:
923: + ts      - the `TS` context obtained from `TSCreate()`
924: . numcost - number of cost functions
925: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors are the same as those of the ODE solution vector
926: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
927: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

929:   Level: beginner

931:   Notes:
932:   The Hessian of the cost function is completely different from the Hessian of the ODE/DAE system

934:   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

936:   After `TSAdjointSolve()` is called, `lambda2` and `mu2` will contain the computed second-order adjoint sensitivities, which can be used to produce a Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

938:   Passing `NULL` for `lambda2` disables the second-order calculation.
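
  Example Usage:
  A sketch of a second-order adjoint computing a Hessian-vector product with respect to the initial
  conditions only (`mu2` = `NULL`), following the call order described above; `u`, `dir`, and the
  first-order vector `lambda` are illustrative and must be created and filled by the caller.
.vb
  Vec lambda[1], lambda2[1], dir;
  PetscCall(VecDuplicate(u, &lambda[0]));           /* first-order gradient, see TSSetCostGradients() */
  PetscCall(VecDuplicate(u, &lambda2[0]));
  PetscCall(VecDuplicate(u, &dir));                 /* direction to be multiplied with the Hessian */
  /* fill dir with the desired direction, then set up first- and second-order adjoints */
  PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
  PetscCall(TSSetCostHessianProducts(ts, 1, lambda2, NULL, dir));
  PetscCall(TSAdjointSetForward(ts, NULL));         /* NULL is treated as the identity, see TSAdjointSetForward() */
  PetscCall(TSSetSaveTrajectory(ts));
  PetscCall(TSSolve(ts, u));
  PetscCall(TSAdjointSolve(ts));                    /* lambda2[0] now holds the Hessian-vector product */
  PetscCall(TSAdjointResetForward(ts));
.ve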

940: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
941: @*/
942: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir)
943: {
944:   PetscFunctionBegin;
946:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostHessianProducts()) is inconsistent with the one set by TSSetCostIntegrand()");
947:   ts->numcost      = numcost;
948:   ts->vecs_sensi2  = lambda2;
949:   ts->vecs_sensi2p = mu2;
950:   ts->vec_dir      = dir;
951:   PetscFunctionReturn(PETSC_SUCCESS);
952: }

954: /*@
955:   TSGetCostHessianProducts - Returns the Hessian-vector products from `TSAdjointSolve()`

957:   Not Collective, but vectors returned are parallel if `TS` is parallel

959:   Input Parameter:
960: . ts - the `TS` context obtained from `TSCreate()`

962:   Output Parameters:
963: + numcost - number of cost functions
964: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors are the same as those of the ODE solution vector
965: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
966: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

968:   Level: intermediate

970: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
971: @*/
972: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir)
973: {
974:   PetscFunctionBegin;
976:   if (numcost) *numcost = ts->numcost;
977:   if (lambda2) *lambda2 = ts->vecs_sensi2;
978:   if (mu2) *mu2 = ts->vecs_sensi2p;
979:   if (dir) *dir = ts->vec_dir;
980:   PetscFunctionReturn(PETSC_SUCCESS);
981: }

983: /*@
984:   TSAdjointSetForward - Triggers the tangent linear solver and initializes the forward sensitivities

986:   Logically Collective

988:   Input Parameters:
989: + ts   - the `TS` context obtained from `TSCreate()`
990: - didp - the derivative of initial values w.r.t. parameters

992:   Level: intermediate

994:   Notes:
995:   When computing sensitivities w.r.t. the initial condition, set `didp` to `NULL` so that the solver treats it mathematically as an identity matrix.
996:   `TSAdjoint` does not reset the tangent linear solver automatically; call `TSAdjointResetForward()` to reset it.

998: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
999: @*/
1000: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
1001: {
1002:   Mat          A;
1003:   Vec          sp;
1004:   PetscScalar *xarr;
1005:   PetscInt     lsize;

1007:   PetscFunctionBegin;
1008:   ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
1009:   PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
1010:   PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
1011:   /* create a single-column dense matrix */
1012:   PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
1013:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

1015:   PetscCall(VecDuplicate(ts->vec_sol, &sp));
1016:   PetscCall(MatDenseGetColumn(A, 0, &xarr));
1017:   PetscCall(VecPlaceArray(sp, xarr));
1018:   if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
1019:     if (didp) {
1020:       PetscCall(MatMult(didp, ts->vec_dir, sp));
1021:       PetscCall(VecScale(sp, 2.));
1022:     } else {
1023:       PetscCall(VecZeroEntries(sp));
1024:     }
1025:   } else { /* tangent linear variable initialized as dir */
1026:     PetscCall(VecCopy(ts->vec_dir, sp));
1027:   }
1028:   PetscCall(VecResetArray(sp));
1029:   PetscCall(MatDenseRestoreColumn(A, &xarr));
1030:   PetscCall(VecDestroy(&sp));

1032:   PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

1034:   PetscCall(MatDestroy(&A));
1035:   PetscFunctionReturn(PETSC_SUCCESS);
1036: }

1038: /*@
1039:   TSAdjointResetForward - Resets the tangent linear solver and destroys the tangent linear context

1041:   Logically Collective

1043:   Input Parameter:
1044: . ts - the `TS` context obtained from `TSCreate()`

1046:   Level: intermediate

1048: .seealso: [](ch_ts), `TSAdjointSetForward()`
1049: @*/
1050: PetscErrorCode TSAdjointResetForward(TS ts)
1051: {
1052:   PetscFunctionBegin;
1053:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1054:   PetscCall(TSForwardReset(ts));
1055:   PetscFunctionReturn(PETSC_SUCCESS);
1056: }

1058: /*@
1059:   TSAdjointSetUp - Sets up the internal data structures for the later use
1060:   of an adjoint solver

1062:   Collective

1064:   Input Parameter:
1065: . ts - the `TS` context obtained from `TSCreate()`

1067:   Level: advanced

1069: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1070: @*/
1071: PetscErrorCode TSAdjointSetUp(TS ts)
1072: {
1073:   TSTrajectory tj;
1074:   PetscBool    match;

1076:   PetscFunctionBegin;
1078:   if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1079:   PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1080:   PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1081:   PetscCall(TSGetTrajectory(ts, &tj));
1082:   PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1083:   if (match) {
1084:     PetscBool solution_only;
1085:     PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1086:     PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1087:   }
1088:   PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* do not use TSHistory */

1090:   if (ts->quadraturets) { /* if there is integral in the cost function */
1091:     PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1092:     if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1093:   }

1095:   PetscTryTypeMethod(ts, adjointsetup);
1096:   ts->adjointsetupcalled = PETSC_TRUE;
1097:   PetscFunctionReturn(PETSC_SUCCESS);
1098: }

1100: /*@
1101:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1103:   Collective

1105:   Input Parameter:
1106: . ts - the `TS` context obtained from `TSCreate()`

1108:   Level: beginner

1110: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1111: @*/
1112: PetscErrorCode TSAdjointReset(TS ts)
1113: {
1114:   PetscFunctionBegin;
1116:   PetscTryTypeMethod(ts, adjointreset);
1117:   if (ts->quadraturets) { /* if there is integral in the cost function */
1118:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1119:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1120:   }
1121:   ts->vecs_sensi         = NULL;
1122:   ts->vecs_sensip        = NULL;
1123:   ts->vecs_sensi2        = NULL;
1124:   ts->vecs_sensi2p       = NULL;
1125:   ts->vec_dir            = NULL;
1126:   ts->adjointsetupcalled = PETSC_FALSE;
1127:   PetscFunctionReturn(PETSC_SUCCESS);
1128: }

1130: /*@
1131:   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

1133:   Logically Collective

1135:   Input Parameters:
1136: + ts    - the `TS` context obtained from `TSCreate()`
1137: - steps - number of steps to use

1139:   Level: intermediate

1141:   Notes:
1142:   Normally one does not call this, and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1143:   so as to integrate back over fewer steps than the full forward run.

1145: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1146: @*/
1147: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1148: {
1149:   PetscFunctionBegin;
1152:   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1153:   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1154:   ts->adjoint_max_steps = steps;
1155:   PetscFunctionReturn(PETSC_SUCCESS);
1156: }

1158: // PetscClangLinter pragma disable: -fdoc-*
1159: /*@C
1160:   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

1162:   Level: deprecated
1163: @*/
1164: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
1165: {
1166:   PetscFunctionBegin;

1170:   ts->rhsjacobianp    = func;
1171:   ts->rhsjacobianpctx = ctx;
1172:   if (Amat) {
1173:     PetscCall(PetscObjectReference((PetscObject)Amat));
1174:     PetscCall(MatDestroy(&ts->Jacp));
1175:     ts->Jacp = Amat;
1176:   }
1177:   PetscFunctionReturn(PETSC_SUCCESS);
1178: }

1180: // PetscClangLinter pragma disable: -fdoc-*
1181: /*@C
1182:   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

1184:   Level: deprecated
1185: @*/
1186: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1187: {
1188:   PetscFunctionBegin;

1193:   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1194:   PetscFunctionReturn(PETSC_SUCCESS);
1195: }

1197: // PetscClangLinter pragma disable: -fdoc-*
1198: /*@
1199:   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

1201:   Level: deprecated
1202: @*/
1203: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1204: {
1205:   PetscFunctionBegin;

1209:   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1210:   PetscFunctionReturn(PETSC_SUCCESS);
1211: }

1213: // PetscClangLinter pragma disable: -fdoc-*
1214: /*@
1215:   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

1217:   Level: deprecated
1218: @*/
1219: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1220: {
1221:   PetscFunctionBegin;

1225:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1226:   PetscFunctionReturn(PETSC_SUCCESS);
1227: }

1229: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1230: /*@C
1231:   TSAdjointMonitorSensi - monitors the first lambda sensitivity

1233:   Level: intermediate

1235: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1236: @*/
1237: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1238: {
1239:   PetscViewer viewer = vf->viewer;

1241:   PetscFunctionBegin;
1243:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1244:   PetscCall(VecView(lambda[0], viewer));
1245:   PetscCall(PetscViewerPopFormat(viewer));
1246:   PetscFunctionReturn(PETSC_SUCCESS);
1247: }

1249: /*@C
1250:   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

1252:   Collective

1254:   Input Parameters:
1255: + ts           - `TS` object you wish to monitor
1256: . name         - the monitor type one is seeking
1257: . help         - message indicating what monitoring is done
1258: . manual       - manual page for the monitor
1259: . monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
1260: - monitorsetup - a function that is called once, ONLY if the user selected this monitor, and that may set additional features of the `TS` or `PetscViewer` objects

1262:   Level: developer

1264: .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1265:           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1266:           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1267:           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1268:           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1269:           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1270:           `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1271: @*/
1272: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
1273: {
1274:   PetscViewer       viewer;
1275:   PetscViewerFormat format;
1276:   PetscBool         flg;

1278:   PetscFunctionBegin;
1279:   PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1280:   if (flg) {
1281:     PetscViewerAndFormat *vf;
1282:     PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1283:     PetscCall(PetscViewerDestroy(&viewer));
1284:     if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1285:     PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
1286:   }
1287:   PetscFunctionReturn(PETSC_SUCCESS);
1288: }

1290: /*@C
1291:   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1292:   timestep to display the iteration's progress.

1294:   Logically Collective

1296:   Input Parameters:
1297: + ts              - the `TS` context obtained from `TSCreate()`
1298: . adjointmonitor  - monitoring routine
1299: . adjointmctx     - [optional] user-defined context for private data for the monitor routine
1300:                     (use `NULL` if no context is desired)
1301: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence

1303:   Calling sequence of `adjointmonitor`:
1304: + ts          - the `TS` context
1305: . steps       - iteration number (after the final time step the monitor routine is called with
1306:                 a step of -1; this is at the final time, which may have been interpolated to)
1307: . time        - current time
1308: . u           - current iterate
1309: . numcost     - number of cost functions
1310: . lambda      - sensitivities to initial conditions
1311: . mu          - sensitivities to parameters
1312: - adjointmctx - [optional] adjoint monitoring context

1314:   Level: intermediate

1316:   Note:
1317:   This routine adds an additional monitor to the list of monitors that
1318:   already has been loaded.

1320:   Fortran Notes:
1321:   Only a single monitor function can be set for each `TS` object
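
  Example Usage:
  A sketch of registering a custom adjoint monitor that prints the norm of the first sensitivity;
  the name `MyAdjointMonitor` is illustrative.
.vb
  static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *mctx)
  {
    PetscReal norm;

    PetscFunctionBeginUser;
    PetscCall(VecNorm(lambda[0], NORM_2, &norm));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " TS adjoint ||lambda_0|| %g\n", step, (double)norm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));
.ve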

1323: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1324: @*/
1325: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1326: {
1327:   PetscInt  i;
1328:   PetscBool identical;

1330:   PetscFunctionBegin;
1332:   for (i = 0; i < ts->numbermonitors; i++) {
1333:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1334:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1335:   }
1336:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1337:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1338:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1339:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1340:   PetscFunctionReturn(PETSC_SUCCESS);
1341: }

1343: /*@C
1344:   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1346:   Logically Collective

1348:   Input Parameter:
1349: . ts - the `TS` context obtained from `TSCreate()`

1351:   Notes:
1352:   There is no way to remove a single, specific monitor.

1354:   Level: intermediate

1356: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1357: @*/
1358: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1359: {
1360:   PetscInt i;

1362:   PetscFunctionBegin;
1364:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1365:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1366:   }
1367:   ts->numberadjointmonitors = 0;
1368:   PetscFunctionReturn(PETSC_SUCCESS);
1369: }

1371: /*@C
1372:   TSAdjointMonitorDefault - the default monitor of adjoint computations

1374:   Input Parameters:
1375: + ts      - the `TS` context
1376: . step    - iteration number (after the final time step the monitor routine is called with a
1377:             step of -1; this is at the final time, which may have been interpolated to)
1378: . time    - current time
1379: . v       - current iterate
1380: . numcost - number of cost functions
1381: . lambda  - sensitivities to initial conditions
1382: . mu      - sensitivities to parameters
1383: - vf      - the viewer and format

1385:   Level: intermediate

1387: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1388: @*/
1389: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1390: {
1391:   PetscViewer viewer = vf->viewer;

1393:   PetscFunctionBegin;
1394:   (void)v;
1395:   (void)numcost;
1396:   (void)lambda;
1397:   (void)mu;
1399:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1400:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1401:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1402:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1403:   PetscCall(PetscViewerPopFormat(viewer));
1404:   PetscFunctionReturn(PETSC_SUCCESS);
1405: }
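
/*
  Example (a sketch under assumptions): activating TSAdjointMonitorDefault() programmatically
  instead of through the -ts_adjoint_monitor option. The function-pointer cast mirrors the one
  used for the draw monitor in TSAdjointSetFromOptions() below.

    PetscViewerAndFormat *vf;

    PetscCall(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf));
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))TSAdjointMonitorDefault, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
*/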

1407: /*@C
1408:   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1409:   `VecView()` for the sensitivities to initial states at each timestep

1411:   Collective

1413:   Input Parameters:
1414: + ts      - the `TS` context
1415: . step    - current time-step
1416: . ptime   - current time
1417: . u       - current state
1418: . numcost - number of cost functions
1419: . lambda  - sensitivities to initial conditions
1420: . mu      - sensitivities to parameters
1421: - dummy   - a `TSMonitorDrawCtx` context created with `TSMonitorDrawCtxCreate()`

1423:   Level: intermediate

1425: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1426: @*/
1427: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy)
1428: {
1429:   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1430:   PetscDraw        draw;
1431:   PetscReal        xl, yl, xr, yr, h;
1432:   char             time[32];

1434:   PetscFunctionBegin;
1435:   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

1437:   PetscCall(VecView(lambda[0], ictx->viewer));
1438:   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1439:   PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
1440:   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1441:   h = yl + .95 * (yr - yl);
1442:   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1443:   PetscCall(PetscDrawFlush(draw));
1444:   PetscFunctionReturn(PETSC_SUCCESS);
1445: }

1447: /*@C
1448:   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from the options database.

1450:   Collective

1452:   Input Parameters:
1453: + ts                 - the `TS` context
1454: - PetscOptionsObject - the options context

1456:   Options Database Keys:
1457: + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1458: . -ts_adjoint_monitor            - print information at each adjoint time step
1459: - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically

1461:   Level: developer

1463:   Note:
1464:   This is not normally called directly by users

1466: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1467: @*/
1468: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
1469: {
1470:   PetscBool tflg, opt;

1472:   PetscFunctionBegin;
1474:   PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1475:   tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1476:   PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1477:   if (opt) {
1478:     PetscCall(TSSetSaveTrajectory(ts));
1479:     ts->adjoint_solve = tflg;
1480:   }
1481:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1482:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1483:   opt = PETSC_FALSE;
1484:   PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1485:   if (opt) {
1486:     TSMonitorDrawCtx ctx;
1487:     PetscInt         howoften = 1;

1489:     PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1490:     PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1491:     PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
1492:   }
1493:   PetscFunctionReturn(PETSC_SUCCESS);
1494: }
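
/*
  Example command line (a sketch; "./myapp" is a placeholder executable) exercising the
  options handled above:

    ./myapp -ts_save_trajectory -ts_adjoint_solve -ts_adjoint_monitor -ts_adjoint_monitor_draw_sensi 1
*/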

1496: /*@
1497:   TSAdjointStep - Steps one time step backward in the adjoint run

1499:   Collective

1501:   Input Parameter:
1502: . ts - the `TS` context obtained from `TSCreate()`

1504:   Level: intermediate

1506: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1507: @*/
1508: PetscErrorCode TSAdjointStep(TS ts)
1509: {
1510:   DM dm;

1512:   PetscFunctionBegin;
1514:   PetscCall(TSGetDM(ts, &dm));
1515:   PetscCall(TSAdjointSetUp(ts));
1516:   ts->steps--; /* must decrease the step index before the adjoint step is taken. */

1518:   ts->reason     = TS_CONVERGED_ITERATING;
1519:   ts->ptime_prev = ts->ptime;
1520:   PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1521:   PetscUseTypeMethod(ts, adjointstep);
1522:   PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1523:   ts->adjoint_steps++;

1525:   if (ts->reason < 0) {
1526:     PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1527:   } else if (!ts->reason) {
1528:     if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1529:   }
1530:   PetscFunctionReturn(PETSC_SUCCESS);
1531: }

1533: /*@
1534:   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

1536:   Collective

1539:   Input Parameter:
1540: . ts - the `TS` context obtained from `TSCreate()`

1542:   Options Database Key:
1543: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

1545:   Level: intermediate

1547:   Notes:
1548:   This must be called after a call to `TSSolve()` that solves the forward problem

1550:   By default this will integrate back to the initial time; one can use `TSAdjointSetSteps()` to stop the backward integration at a later time

1552: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1553: @*/
1554: PetscErrorCode TSAdjointSolve(TS ts)
1555: {
1556:   static PetscBool cite = PETSC_FALSE;
1557: #if defined(TSADJOINT_STAGE)
1558:   PetscLogStage adjoint_stage;
1559: #endif

1561:   PetscFunctionBegin;
1563:   PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1564:                                    "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1565:                                    "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1566:                                    "  journal       = {SIAM Journal on Scientific Computing},\n"
1567:                                    "  volume        = {44},\n"
1568:                                    "  number        = {1},\n"
1569:                                    "  pages         = {C1-C24},\n"
1570:                                    "  doi           = {10.1137/21M140078X},\n"
1571:                                    "  year          = {2022}\n}\n",
1572:                                    &cite));
1573: #if defined(TSADJOINT_STAGE)
1574:   PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1575:   PetscCall(PetscLogStagePush(adjoint_stage));
1576: #endif
1577:   PetscCall(TSAdjointSetUp(ts));

1579:   /* reset time step and iteration counters */
1580:   ts->adjoint_steps     = 0;
1581:   ts->ksp_its           = 0;
1582:   ts->snes_its          = 0;
1583:   ts->num_snes_failures = 0;
1584:   ts->reject            = 0;
1585:   ts->reason            = TS_CONVERGED_ITERATING;

1587:   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1588:   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

1590:   while (!ts->reason) {
1591:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1592:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1593:     PetscCall(TSAdjointEventHandler(ts));
1594:     PetscCall(TSAdjointStep(ts));
1595:     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1596:   }
1597:   if (!ts->steps) {
1598:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1599:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1600:   }
1601:   ts->solvetime = ts->ptime;
1602:   PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1603:   PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1604:   ts->adjoint_max_steps = 0;
1605: #if defined(TSADJOINT_STAGE)
1606:   PetscCall(PetscLogStagePop());
1607: #endif
1608:   PetscFunctionReturn(PETSC_SUCCESS);
1609: }
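
/*
  Example (a minimal workflow sketch under assumptions): one cost function Phi and np problem
  parameters; Jacp, RHSJacobianP, user, u, and np are placeholders supplied by the application,
  not library API.

    Vec lambda[1], mu[1];

    PetscCall(TSSetSaveTrajectory(ts));                          // the adjoint run replays the forward trajectory
    PetscCall(TSSetRHSJacobianP(ts, Jacp, RHSJacobianP, &user)); // needed only when parameter gradients mu are wanted
    PetscCall(TSSolve(ts, u));                                   // forward solve
    PetscCall(VecDuplicate(u, &lambda[0]));                      // gradient w.r.t. the initial condition
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &mu[0]));        // gradient w.r.t. the np parameters
    // initialize lambda[0] = dPhi/dy and mu[0] = dPhi/dp at the final time
    PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
    PetscCall(TSAdjointSolve(ts));                               // lambda[0] and mu[0] now hold the full gradients
*/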

1611: /*@C
1612:   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1614:   Collective

1616:   Input Parameters:
1617: + ts      - time stepping context obtained from `TSCreate()`
1618: . step    - step number that has just completed
1619: . ptime   - model time of the state
1620: . u       - state at the current model time
1621: . numcost - number of cost functions (dimension of lambda  or mu)
1622: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1623: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

1625:   Level: developer

1627:   Note:
1628:   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1629:   Users would almost never call this routine directly.

1631: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1632: @*/
1633: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
1634: {
1635:   PetscInt i, n = ts->numberadjointmonitors;

1637:   PetscFunctionBegin;
1640:   PetscCall(VecLockReadPush(u));
1641:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1642:   PetscCall(VecLockReadPop(u));
1643:   PetscFunctionReturn(PETSC_SUCCESS);
1644: }

1646: /*@
1647:   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

1649:   Collective

1651:   Input Parameter:
1652: . ts - time stepping context

1654:   Level: advanced

1656:   Notes:
1657:   This function cannot be called until `TSAdjointStep()` has been completed.

1659: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1660:  @*/
1661: PetscErrorCode TSAdjointCostIntegral(TS ts)
1662: {
1663:   PetscFunctionBegin;
1665:   PetscUseTypeMethod(ts, adjointintegral);
1666:   PetscFunctionReturn(PETSC_SUCCESS);
1667: }

1669: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1671: /*@
1672:   TSForwardSetUp - Sets up the internal data structures for the later use
1673:   of forward sensitivity analysis

1675:   Collective

1677:   Input Parameter:
1678: . ts - the `TS` context obtained from `TSCreate()`

1680:   Level: advanced

1682: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1683: @*/
1684: PetscErrorCode TSForwardSetUp(TS ts)
1685: {
1686:   PetscFunctionBegin;
1688:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1689:   PetscTryTypeMethod(ts, forwardsetup);
1690:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1691:   ts->forwardsetupcalled = PETSC_TRUE;
1692:   PetscFunctionReturn(PETSC_SUCCESS);
1693: }

1695: /*@
1696:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1698:   Collective

1700:   Input Parameter:
1701: . ts - the `TS` context obtained from `TSCreate()`

1703:   Level: advanced

1705: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1706: @*/
1707: PetscErrorCode TSForwardReset(TS ts)
1708: {
1709:   TS quadts = ts->quadraturets;

1711:   PetscFunctionBegin;
1713:   PetscTryTypeMethod(ts, forwardreset);
1714:   PetscCall(MatDestroy(&ts->mat_sensip));
1715:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1716:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1717:   ts->forward_solve      = PETSC_FALSE;
1718:   ts->forwardsetupcalled = PETSC_FALSE;
1719:   PetscFunctionReturn(PETSC_SUCCESS);
1720: }

1722: /*@
1723:   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

1725:   Input Parameters:
1726: + ts        - the `TS` context obtained from `TSCreate()`
1727: . numfwdint - number of integrals
1728: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1730:   Level: deprecated

1732: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1733: @*/
1734: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp)
1735: {
1736:   PetscFunctionBegin;
1738:   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of integrals (2nd parameter of TSForwardSetIntegralGradients()) is inconsistent with the number of cost functions set by TSSetCostIntegrand()");
1739:   if (!ts->numcost) ts->numcost = numfwdint;

1741:   ts->vecs_integral_sensip = vp;
1742:   PetscFunctionReturn(PETSC_SUCCESS);
1743: }

1745: /*@
1746:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1748:   Input Parameter:
1749: . ts - the `TS` context obtained from `TSCreate()`

1751:   Output Parameters:
1752: + numfwdint - number of integrals
1753: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1755:   Level: deprecated

1757: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1758: @*/
1759: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp)
1760: {
1761:   PetscFunctionBegin;
1763:   PetscAssertPointer(vp, 3);
1764:   if (numfwdint) *numfwdint = ts->numcost;
1765:   if (vp) *vp = ts->vecs_integral_sensip;
1766:   PetscFunctionReturn(PETSC_SUCCESS);
1767: }

1769: /*@
1770:   TSForwardStep - Compute the forward sensitivity for one time step.

1772:   Collective

1774:   Input Parameter:
1775: . ts - time stepping context

1777:   Level: advanced

1779:   Notes:
1780:   This function cannot be called until `TSStep()` has been completed.

1782: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1783: @*/
1784: PetscErrorCode TSForwardStep(TS ts)
1785: {
1786:   PetscFunctionBegin;
1788:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1789:   PetscUseTypeMethod(ts, forwardstep);
1790:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1791:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSForwardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1792:   PetscFunctionReturn(PETSC_SUCCESS);
1793: }

1795: /*@
1796:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of the solution w.r.t. the problem parameters and initial values.

1798:   Logically Collective

1800:   Input Parameters:
1801: + ts   - the `TS` context obtained from `TSCreate()`
1802: . nump - number of parameters
1803: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1805:   Level: beginner

1807:   Notes:
1808:   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`

1810:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1811:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1812:   You must call this function before `TSSolve()`.
1813:   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.

1815: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1816: @*/
1817: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1818: {
1819:   PetscFunctionBegin;
1822:   ts->forward_solve = PETSC_TRUE;
1823:   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) {
1824:     PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1825:   } else ts->num_parameters = nump;
1826:   PetscCall(PetscObjectReference((PetscObject)Smat));
1827:   PetscCall(MatDestroy(&ts->mat_sensip));
1828:   ts->mat_sensip = Smat;
1829:   PetscFunctionReturn(PETSC_SUCCESS);
1830: }
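
/*
  Example (a sketch under assumptions): enabling forward sensitivities before TSSolve().
  Here n is the local state size and np the number of parameters; the initial condition is
  assumed independent of the parameters, so the sensitivity matrix starts at zero.

    Mat Smat;

    PetscCall(MatCreateDense(PETSC_COMM_WORLD, n, PETSC_DECIDE, PETSC_DETERMINE, np, NULL, &Smat));
    PetscCall(MatZeroEntries(Smat));                    // S = dy/dp at the starting time
    PetscCall(TSForwardSetSensitivities(ts, np, Smat));
    PetscCall(TSSolve(ts, u));                          // Smat now holds dy(t_final)/dp
*/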

1832: /*@
1833:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1835:   Not Collective, but the `Smat` returned is parallel if `ts` is parallel

1837:   Input Parameter:
1838: . ts   - the `TS` context obtained from `TSCreate()`

1839:   Output Parameters:
1840: + nump - number of parameters
1841: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1842:   Level: intermediate

1844: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1845: @*/
1846: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1847: {
1848:   PetscFunctionBegin;
1850:   if (nump) *nump = ts->num_parameters;
1851:   if (Smat) *Smat = ts->mat_sensip;
1852:   PetscFunctionReturn(PETSC_SUCCESS);
1853: }

1855: /*@
1856:   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

1858:   Collective

1860:   Input Parameter:
1861: . ts - time stepping context

1863:   Level: advanced

1865:   Note:
1866:   This function cannot be called until `TSStep()` has been completed.

1868: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1869: @*/
1870: PetscErrorCode TSForwardCostIntegral(TS ts)
1871: {
1872:   PetscFunctionBegin;
1874:   PetscUseTypeMethod(ts, forwardintegral);
1875:   PetscFunctionReturn(PETSC_SUCCESS);
1876: }

1878: /*@
1879:   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

1881:   Collective

1883:   Input Parameters:
1884: + ts   - the `TS` context obtained from `TSCreate()`
1885: - didp - parametric sensitivities of the initial condition

1887:   Level: intermediate

1889:   Notes:
1890:   `TSSolve()` allows users to pass the initial solution directly to `TS`, but the tangent linear variables cannot be initialized in this way.
1891:   This function sets the initial values for the tangent linear variables.

1893: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1894: @*/
1895: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1896: {
1897:   PetscFunctionBegin;
1900:   if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
1901:   PetscFunctionReturn(PETSC_SUCCESS);
1902: }
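
/*
  Example (a sketch; didp is assumed to be a dense matrix already filled with dy0/dp, the
  parametric sensitivity of the initial condition u0):

    PetscCall(TSSetSolution(ts, u0));                      // initial state
    PetscCall(TSForwardSetInitialSensitivities(ts, didp)); // initial tangent linear variables
    PetscCall(TSSolve(ts, u0));
*/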

1904: /*@
1905:   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1907:   Input Parameter:
1908: . ts - the `TS` context obtained from `TSCreate()`

1910:   Output Parameters:
1911: + ns - number of stages
1912: - S  - tangent linear sensitivities at the intermediate stages

1914:   Level: advanced

1916: .seealso: `TS`
1917: @*/
1918: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1919: {
1920:   PetscFunctionBegin;

1923:   if (!ts->ops->forwardgetstages) *S = NULL;
1924:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1925:   PetscFunctionReturn(PETSC_SUCCESS);
1926: }

1928: /*@
1929:   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1931:   Input Parameters:
1932: + ts  - the `TS` context obtained from `TSCreate()`
1933: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1935:   Output Parameter:
1936: . quadts - the child `TS` context

1938:   Level: intermediate

1940: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1941: @*/
1942: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1943: {
1944:   char prefix[128];

1946:   PetscFunctionBegin;
1948:   PetscAssertPointer(quadts, 3);
1949:   PetscCall(TSDestroy(&ts->quadraturets));
1950:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1951:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1952:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1953:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1954:   *quadts = ts->quadraturets;

1956:   if (ts->numcost) {
1957:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1958:   } else {
1959:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1960:   }
1961:   ts->costintegralfwd = fwd;
1962:   PetscFunctionReturn(PETSC_SUCCESS);
1963: }
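
/*
  Example (a sketch under assumptions): accumulating a cost integral during the forward run.
  CostIntegrand is a placeholder user routine with the standard TSRHSFunction signature that
  evaluates the integrand of the cost functional; user is a placeholder context.

    TS  quadts;
    Vec q;

    PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));        // evaluate in the forward run
    PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user)); // integrand r(u,p,t)
    PetscCall(TSSolve(ts, u));
    PetscCall(TSGetCostIntegral(ts, &q));                            // q holds the accumulated integral(s)
*/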

1965: /*@
1966:   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1968:   Input Parameter:
1969: . ts - the `TS` context obtained from `TSCreate()`

1971:   Output Parameters:
1972: + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1973: - quadts - the child `TS` context

1975:   Level: intermediate

1977: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1978: @*/
1979: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1980: {
1981:   PetscFunctionBegin;
1983:   if (fwd) *fwd = ts->costintegralfwd;
1984:   if (quadts) *quadts = ts->quadraturets;
1985:   PetscFunctionReturn(PETSC_SUCCESS);
1986: }

1988: /*@
1989:   TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

1991:   Collective

1993:   Input Parameters:
1994: + ts - the `TS` context obtained from `TSCreate()`
1995: - x  - state vector

1997:   Output Parameters:
1998: + J    - Jacobian matrix
1999: - Jpre - matrix used to compute the preconditioner for `J` (may be same as `J`)

2001:   Level: developer

2003:   Note:
2004:   Uses finite differencing when `TS` Jacobian is not available.

2006: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
2007: @*/
2008: PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
2009: {
2010:   SNES snes                                          = ts->snes;
2011:   PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

2013:   PetscFunctionBegin;
2014:   /*
2015:     Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
2016:     because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
2017:     explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
2018:     coloring is used.
2019:   */
2020:   PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
2021:   if (jac == SNESComputeJacobianDefaultColor) {
2022:     Vec f;
2023:     PetscCall(SNESSetSolution(snes, x));
2024:     PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
2025:     /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
2026:     PetscCall(SNESComputeFunction(snes, x, f));
2027:   }
2028:   PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
2029:   PetscFunctionReturn(PETSC_SUCCESS);
2030: }