Actual source code: ex12.c
1: static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";
3: /* ------------------------------------------------------------------------
5: This program solves the PDE
7:        u * u_xx
8:  u_t = ---------
9:        2*(t+1)^2
11: on the domain 0 <= x <= 1, with boundary conditions
12: u(t,0) = t + 1, u(t,1) = 2*t + 2,
13: and initial condition
14: u(0,x) = 1 + x*x.
16: The exact solution is:
17: u(t,x) = (1 + x*x) * (1 + t)
19: Note that since the solution is linear in time and quadratic in x,
20: the finite difference scheme actually computes the "exact" solution.
22: By default, we use the backward Euler method.
24: ------------------------------------------------------------------------- */
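/*
   A quick check that the stated solution is indeed exact (a worked verification,
   not part of the solver): with u(t,x) = (1 + x*x)*(1 + t),

       u_t  = 1 + x*x,      u_xx = 2*(1 + t),

   so

       u*u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
                            = 1 + x*x = u_t,

   while u(t,0) = 1 + t, u(t,1) = 2*(t + 1), and u(0,x) = 1 + x*x match the
   boundary and initial conditions above.
*/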
26: /*
27: Include "petscts.h" to use the PETSc timestepping routines. Note that
28: this file automatically includes "petscsys.h" and other lower-level
29: PETSc include files.
31: Include "petscdmda.h" to allow us to use the distributed array data
32: structures to manage the parallel grid.
33: */
34: #include <petscts.h>
35: #include <petscdm.h>
36: #include <petscdmda.h>
37: #include <petscdraw.h>
39: /*
40: User-defined application context - contains data needed by the
41: application-provided callback routines.
42: */
43: typedef struct {
44: MPI_Comm comm; /* communicator */
45: DM da; /* distributed array data structure */
46: Vec localwork; /* local ghosted work vector */
47: Vec u_local; /* local ghosted approximate solution vector */
48: Vec solution; /* global exact solution vector */
49: PetscInt m; /* total number of grid points */
50: PetscReal h; /* mesh width: h = 1/(m-1) */
51: } AppCtx;
53: /*
54: User-defined routines, provided below.
55: */
56: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
57: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
58: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
59: extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
61: int main(int argc, char **argv)
62: {
63: AppCtx appctx; /* user-defined application context */
64: TS ts; /* timestepping context */
65: Mat A; /* Jacobian matrix data structure */
66: Vec u; /* approximate solution vector */
67: PetscInt time_steps_max = 100; /* default max timesteps */
68: PetscReal dt;
69: PetscReal time_total_max = 100.0; /* default max total time */
70: PetscOptions options, optionscopy;
72: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
73: Initialize program and set problem parameters
74: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
76: PetscFunctionBeginUser;
77: PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
79: PetscCall(PetscOptionsCreate(&options));
80: PetscCall(PetscOptionsSetValue(options, "-ts_monitor", "ascii"));
81: PetscCall(PetscOptionsSetValue(options, "-snes_monitor", "ascii"));
82: PetscCall(PetscOptionsSetValue(options, "-ksp_monitor", "ascii"));
84: appctx.comm = PETSC_COMM_WORLD;
85: appctx.m = 60;
87: PetscCall(PetscOptionsGetInt(options, NULL, "-M", &appctx.m, NULL));
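  /*
     Other parameters could be read from the same private options database in
     the same way; for example (a sketch only, the -debug flag is hypothetical
     and not used elsewhere in this example):

       PetscBool debug = PETSC_FALSE;
       PetscCall(PetscOptionsGetBool(options, NULL, "-debug", &debug, NULL));
  */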
89: appctx.h = 1.0 / (appctx.m - 1.0);
91: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
92: Create vector data structures
93: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
95: /*
96: Create distributed array (DMDA) to manage parallel grid and vectors
97: and to set up the ghost point communication pattern. There are M
98: total grid values spread equally among all the processors.
99: */
100: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
101: PetscCall(PetscObjectSetOptions((PetscObject)appctx.da, options));
102: PetscCall(DMSetFromOptions(appctx.da));
103: PetscCall(DMSetUp(appctx.da));
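  /*
     Note: PetscObjectSetOptions() attaches the private options database created
     above to the DM, so DMSetFromOptions() (and later option queries) read from
     that database rather than from the global one; the call must come before
     DMSetFromOptions() to take effect. The same pattern is used below for the
     TS and the Mat.
  */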
105: /*
106: Extract global and local vectors from DMDA; we use these to store the
107: approximate solution. Then duplicate these for remaining vectors that
108: have the same types.
109: */
110: PetscCall(DMCreateGlobalVector(appctx.da, &u));
111: PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));
113: /*
114: Create local work vector for use in evaluating right-hand-side function;
115: create global work vector for storing exact solution.
116: */
117: PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
118: PetscCall(VecDuplicate(u, &appctx.solution));
120: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121: Create timestepping solver context; set callback routine for
122: right-hand-side function evaluation.
123: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
125: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
126: PetscCall(PetscObjectSetOptions((PetscObject)ts, options));
127: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
128: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
130: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
131: For nonlinear problems, the user can provide a Jacobian evaluation
132: routine (or use a finite differencing approximation).
134: Create matrix data structure; set Jacobian evaluation routine.
135: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
137: PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
138: PetscCall(PetscObjectSetOptions((PetscObject)A, options));
139: PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
140: PetscCall(MatSetFromOptions(A));
141: PetscCall(MatSetUp(A));
142: PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
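  /*
     If no analytic Jacobian were coded, a finite-difference or matrix-free
     Jacobian could be requested instead; for example (a sketch only), adding

       PetscCall(PetscOptionsSetValue(options, "-snes_mf", NULL));

     to the private options database before TSSetFromOptions() would make the
     underlying SNES use a matrix-free Jacobian.
  */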
144: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
145: Set solution vector and initial timestep
146: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
148: dt = appctx.h / 2.0;
149: PetscCall(TSSetTimeStep(ts, dt));
151: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
152: Customize timestepping solver:
153: - Set the solution method to be the Backward Euler method.
154: - Set timestepping duration info
155: Then set runtime options, which can override these defaults.
156: For example,
157: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
158: to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
159: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
161: PetscCall(TSSetType(ts, TSBEULER));
162: PetscCall(TSSetMaxSteps(ts, time_steps_max));
163: PetscCall(TSSetMaxTime(ts, time_total_max));
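  /* TS_EXACTFINALTIME_STEPOVER lets the last step pass time_total_max instead
     of being shortened or interpolated to hit it exactly */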
164: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
165: PetscCall(TSSetFromOptions(ts));
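  /*
     Because the TS consults the private database attached above rather than the
     global one, command-line options such as -ts_max_steps would only take
     effect here if first copied into that database, e.g. (a sketch only)

       PetscCall(PetscOptionsInsert(options, &argc, &argv, NULL));

     called before TSSetFromOptions().
  */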
167: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
168: Solve the problem
169: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
171: /*
172: Evaluate initial conditions
173: */
174: PetscCall(InitialConditions(u, &appctx));
176: /*
177: Run the timestepping solver
178: */
179: PetscCall(TSSolve(ts, u));
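  /*
     ExactSolution() below could be used to check the computed answer; a sketch
     of such a check (not performed in this test) is

       PetscReal ftime, norm;
       PetscCall(TSGetSolveTime(ts, &ftime));
       PetscCall(ExactSolution(ftime, appctx.solution, &appctx));
       PetscCall(VecAXPY(appctx.solution, -1.0, u));
       PetscCall(VecNorm(appctx.solution, NORM_2, &norm));
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
  */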
181: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182: Free work space. All PETSc objects should be destroyed when they
183: are no longer needed.
184: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
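  /*
     Verify that the TS still reports the private options database that was
     attached with PetscObjectSetOptions(); this is the behavior being tested.
  */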
186: PetscCall(PetscObjectGetOptions((PetscObject)ts, &optionscopy));
187: PetscCheck(options == optionscopy, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "PetscObjectGetOptions() failed");
189: PetscCall(TSDestroy(&ts));
190: PetscCall(VecDestroy(&u));
191: PetscCall(MatDestroy(&A));
192: PetscCall(DMDestroy(&appctx.da));
193: PetscCall(VecDestroy(&appctx.localwork));
194: PetscCall(VecDestroy(&appctx.solution));
195: PetscCall(VecDestroy(&appctx.u_local));
196: PetscCall(PetscOptionsDestroy(&options));
198: /*
199: Always call PetscFinalize() before exiting a program. This routine
200: - finalizes the PETSc libraries as well as MPI
201: - provides summary and diagnostic information if certain runtime
202: options are chosen (e.g., -log_view).
203: */
204: PetscCall(PetscFinalize());
205: return 0;
206: }
207: /* --------------------------------------------------------------------- */
208: /*
209: InitialConditions - Computes the solution at the initial time.
211: Input Parameters:
212: u - uninitialized solution vector (global)
213: appctx - user-defined application context
215: Output Parameter:
216: u - vector with solution at initial time (global)
217: */
218: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
219: {
220: PetscScalar *u_localptr, h = appctx->h, x;
221: PetscInt i, mybase, myend;
223: PetscFunctionBeginUser;
224: /*
225: Determine starting point of each processor's range of
226: grid values.
227: */
228: PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
230: /*
231: Get a pointer to vector data.
232: - For default PETSc vectors, VecGetArray() returns a pointer to
233: the data array. Otherwise, the routine is implementation dependent.
234: - You MUST call VecRestoreArray() when you no longer need access to
235: the array.
236: - Note that the Fortran interface to VecGetArray() differs from the
237: C version. See the users manual for details.
238: */
239: PetscCall(VecGetArray(u, &u_localptr));
241: /*
242: We initialize the solution array by simply writing the solution
243: directly into the array locations. Alternatively, we could use
244: VecSetValues() or VecSetValuesLocal().
245: */
246: for (i = mybase; i < myend; i++) {
247: x = h * (PetscReal)i; /* current location in global grid */
248: u_localptr[i - mybase] = 1.0 + x * x;
249: }
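  /*
     Equivalently (a sketch only), the same values could be set with
     VecSetValues(), followed by the assembly calls:

       for (i = mybase; i < myend; i++) {
         PetscScalar val = 1.0 + (h * (PetscReal)i) * (h * (PetscReal)i);
         PetscCall(VecSetValues(u, 1, &i, &val, INSERT_VALUES));
       }
       PetscCall(VecAssemblyBegin(u));
       PetscCall(VecAssemblyEnd(u));
  */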
251: /*
252: Restore vector
253: */
254: PetscCall(VecRestoreArray(u, &u_localptr));
256: PetscFunctionReturn(PETSC_SUCCESS);
257: }
258: /* --------------------------------------------------------------------- */
259: /*
260: ExactSolution - Computes the exact solution at a given time.
262: Input Parameters:
263: t - current time
264: solution - vector in which exact solution will be computed
265: appctx - user-defined application context
267: Output Parameter:
268: solution - vector with the newly computed exact solution
269: */
270: PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
271: {
272: PetscScalar *s_localptr, h = appctx->h, x;
273: PetscInt i, mybase, myend;
275: PetscFunctionBeginUser;
276: /*
277: Determine starting and ending points of each processor's
278: range of grid values
279: */
280: PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
282: /*
283: Get a pointer to vector data.
284: */
285: PetscCall(VecGetArray(solution, &s_localptr));
287: /*
288: Simply write the solution directly into the array locations.
289: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
290: */
291: for (i = mybase; i < myend; i++) {
292: x = h * (PetscReal)i;
293: s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
294: }
296: /*
297: Restore vector
298: */
299: PetscCall(VecRestoreArray(solution, &s_localptr));
300: PetscFunctionReturn(PETSC_SUCCESS);
301: }
302: /* --------------------------------------------------------------------- */
303: /*
304: RHSFunction - User-provided routine that evaluates the right-hand-side
305: function of the ODE. This routine is set in the main program by
306: calling TSSetRHSFunction(). We compute:
307: global_out = F(global_in)
309: Input Parameters:
310: ts - timestepping context
311: t - current time
312: global_in - vector containing the current iterate
313: ctx - (optional) user-provided context for function evaluation.
314: In this case we use the appctx defined above.
316: Output Parameter:
317: global_out - vector containing the newly evaluated function
318: */
319: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
320: {
321: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
322: DM da = appctx->da; /* distributed array */
323: Vec local_in = appctx->u_local; /* local ghosted input vector */
324: Vec localwork = appctx->localwork; /* local ghosted work vector */
325: PetscInt i, localsize;
326: PetscMPIInt rank, size;
327: PetscScalar *copyptr, sc;
328: const PetscScalar *localptr;
330: PetscFunctionBeginUser;
331: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
332: Get ready for local function computations
333: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
334: /*
335: Scatter ghost points to local vector, using the 2-step process
336: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
337: By placing code between these two statements, computations can be
338: done while messages are in transition.
339: */
340: PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
341: PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
343: /*
344: Access directly the values in our local INPUT work array
345: */
346: PetscCall(VecGetArrayRead(local_in, &localptr));
348: /*
349: Access directly the values in our local OUTPUT work array
350: */
351: PetscCall(VecGetArray(localwork, &copyptr));
353: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
355: /*
356: Evaluate our function on the nodes owned by this processor
357: */
358: PetscCall(VecGetLocalSize(local_in, &localsize));
360: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
361: Compute entries for the locally owned part
362: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
364: /*
365: Handle boundary conditions: This is done by using the boundary condition
366: u(t,boundary) = g(t,boundary)
367: for some function g. Now take the derivative with respect to t to obtain
368: u_{t}(t,boundary) = g_{t}(t,boundary)
370: In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
371: and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
372: */
373: PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
374: PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
375: if (rank == 0) copyptr[0] = 1.0;
376: if (rank == size - 1) copyptr[localsize - 1] = 2.0;
378: /*
379: Handle the interior nodes where the PDE is replaced by finite
380: difference operators.
381: */
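  /*
     On the interior, the right-hand side of the semi-discrete ODE is the
     centered difference

       F_i(t,u) = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),

     which is exactly what the loop below computes with sc = 1/(2*h^2*(1+t)^2).
  */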
382: for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
384: /*
385: Restore vectors
386: */
387: PetscCall(VecRestoreArrayRead(local_in, &localptr));
388: PetscCall(VecRestoreArray(localwork, &copyptr));
390: /*
391: Insert values from the local OUTPUT vector into the global
392: output vector
393: */
394: PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
395: PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
397: PetscFunctionReturn(PETSC_SUCCESS);
398: }
399: /* --------------------------------------------------------------------- */
400: /*
401: RHSJacobian - User-provided routine to compute the Jacobian of
402: the nonlinear right-hand-side function of the ODE.
404: Input Parameters:
405: ts - the TS context
406: t - current time
407: global_in - global input vector
408: ctx - optional user-defined context, as set by TSSetRHSJacobian()
410: Output Parameters:
411: AA - Jacobian matrix
412: BB - optionally different preconditioning matrix
415: Notes:
416: RHSJacobian computes entries for the locally owned part of the Jacobian.
417: - Currently, all PETSc parallel matrix formats are partitioned by
418: contiguous chunks of rows across the processors.
419: - Each processor needs to insert only elements that it owns
420: locally (but any non-local elements will be sent to the
421: appropriate processor during matrix assembly).
422: - Always specify global row and columns of matrix entries when
423: using MatSetValues().
424: - Here, we set all entries for a particular row at once.
425: - Note that MatSetValues() uses 0-based row and column numbers
426: in Fortran as well as in C.
427: */
428: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
429: {
430: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
431: Vec local_in = appctx->u_local; /* local ghosted input vector */
432: DM da = appctx->da; /* distributed array */
433: PetscScalar v[3], sc;
434: const PetscScalar *localptr;
435: PetscInt i, mstart, mend, mstarts, mends, idx[3], is;
437: PetscFunctionBeginUser;
438: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
439: Get ready for local Jacobian computations
440: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
441: /*
442: Scatter ghost points to local vector, using the 2-step process
443: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
444: By placing code between these two statements, computations can be
445: done while messages are in transition.
446: */
447: PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
448: PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
450: /*
451: Get pointer to vector data
452: */
453: PetscCall(VecGetArrayRead(local_in, &localptr));
455: /*
456: Get starting and ending locally owned rows of the matrix
457: */
458: PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
459: mstart = mstarts;
460: mend = mends;
462: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
463: Compute entries for the locally owned part of the Jacobian.
464: - Currently, all PETSc parallel matrix formats are partitioned by
465: contiguous chunks of rows across the processors.
466: - Each processor needs to insert only elements that it owns
467: locally (but any non-local elements will be sent to the
468: appropriate processor during matrix assembly).
469: - Here, we set all entries for a particular row at once.
470: - We can set matrix entries using either
471: MatSetValuesLocal() or MatSetValues().
472: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
474: /*
475: Set matrix rows corresponding to boundary data
476: */
477: if (mstart == 0) {
478: v[0] = 0.0;
479: PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
480: mstart++;
481: }
482: if (mend == appctx->m) {
483: mend--;
484: v[0] = 0.0;
485: PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
486: }
488: /*
489: Set matrix rows corresponding to interior data. We construct the
490: matrix one row at a time.
491: */
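  /*
     For F_i = sc*u_i*(u_{i+1} - 2*u_i + u_{i-1}), the Jacobian entries in row i
     are

       dF_i/du_{i-1} = sc*u_i,
       dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
       dF_i/du_{i+1} = sc*u_i,

     which is what v[0], v[1], v[2] hold below.
  */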
492: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
493: for (i = mstart; i < mend; i++) {
494: idx[0] = i - 1;
495: idx[1] = i;
496: idx[2] = i + 1;
497: is = i - mstart + 1;
498: v[0] = sc * localptr[is];
499: v[1] = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
500: v[2] = sc * localptr[is];
501: PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
502: }
504: /*
505: Restore vector
506: */
507: PetscCall(VecRestoreArrayRead(local_in, &localptr));
509: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
510: Complete the matrix assembly process and set some options
511: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
512: /*
513: Assemble matrix, using the 2-step process:
514: MatAssemblyBegin(), MatAssemblyEnd()
515: Computations can be done while messages are in transition
516: by placing code between these two statements.
517: */
518: PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
519: PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
520: if (BB != AA) {
521: PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
522: PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
523: }
525: /*
526: Set an option to indicate that we will never add a new nonzero location
527: to the matrix. If we do, it will generate an error.
528: */
529: PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
531: PetscFunctionReturn(PETSC_SUCCESS);
532: }
534: /*TEST
536:    test:
537:      requires: !single
539: TEST*/