Actual source code: ex29.c
/*
   Added at the request of Marc Garbey.

   Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

      -div \rho grad u = f,  0 < x,y < 1,

   with forcing function

      f = e^{-x^2/\nu} e^{-y^2/\nu}

   with Dirichlet boundary conditions

      u = f(x,y) for x = 0, x = 1, y = 0, y = 1

   or pure Neumann boundary conditions.

   This uses multigrid to solve the linear system.
*/
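
/*
   Discretization note (describing ComputeMatrix()/ComputeRHS() below): the operator is
   assembled as the standard 5-point finite-difference stencil on the uniform mx x my grid
   with spacings Hx = 1/(mx-1), Hy = 1/(my-1), scaled by the cell area Hx*Hy. An interior row
   reads

      2*rho*(Hx/Hy + Hy/Hx) u_ij - rho*(Hy/Hx)*(u_{i-1,j} + u_{i+1,j})
                                 - rho*(Hx/Hy)*(u_{i,j-1} + u_{i,j+1}) = f_ij * Hx * Hy,

   which is why ComputeRHS() multiplies the interior forcing values by Hx*Hy. Dirichlet
   boundary rows keep only the diagonal entry; Neumann boundary rows drop the neighbors
   outside the domain and set the diagonal so that the row sums to zero.
*/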

static char help[] = "Solves 2D inhomogeneous Laplacian using multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>

extern PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
extern PetscErrorCode ComputeRHS(KSP, Vec, void *);

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
} UserContext;

int main(int argc, char **argv)
{
  KSP         ksp;
  DM          da;
  UserContext user;
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt    bc;
  Vec         b, x;
  PetscBool   testsolver = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 3, 3, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0, 1, 0, 1, 0, 0));
  PetscCall(DMDASetFieldName(da, 0, "Pressure"));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "DM");
  user.rho = 1.0;
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex29.c", user.rho, &user.rho, NULL));
  user.nu = 0.1;
  PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user.nu, &user.nu, NULL));
  bc = (PetscInt)DIRICHLET;
  PetscCall(PetscOptionsEList("-bc_type", "Type of boundary condition", "ex29.c", bcTypes, 2, bcTypes[0], &bc, NULL));
  user.bcType = (BCType)bc;
  PetscCall(PetscOptionsBool("-testsolver", "Run solver multiple times, useful for performance studies of solver", "ex29.c", testsolver, &testsolver, NULL));
  PetscOptionsEnd();
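
  /* Attach the DMDA and the discretization callbacks to the KSP: with KSPSetDM() the KSP
     (and PCMG, when run with -pc_type mg -da_refine <n>) can build the right-hand side and
     the operator on each grid level by calling ComputeRHS() and ComputeMatrix(). */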
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, &user));
  PetscCall(KSPSetDM(ksp, da));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));
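
  /* Optional timing mode: deactivate the DM on the KSP so the later solves reuse the already
     assembled operator and right-hand side, then repeat KSPSolve() inside a separate log stage
     so that -log_view isolates the cost of the solve itself. */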
  if (testsolver) {
    PetscCall(KSPGetSolution(ksp, &x));
    PetscCall(KSPGetRhs(ksp, &b));
    PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
    PetscCall(KSPSolve(ksp, b, x));
    {
      PetscLogStage stage;
      PetscInt      i, n = 20;

      PetscCall(PetscLogStageRegister("Solve only", &stage));
      PetscCall(PetscLogStagePush(stage));
      for (i = 0; i < n; i++) PetscCall(KSPSolve(ksp, b, x));
      PetscCall(PetscLogStagePop());
    }
  }

  PetscCall(DMDestroy(&da));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  UserContext  *user = (UserContext *)ctx;
  PetscInt      i, j, mx, my, xm, ym, xs, ys;
  PetscScalar   Hx, Hy, HydHx, HxdHy;
  PetscScalar **array;
  DM            da;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx    = 1.0 / (PetscReal)(mx - 1);
  Hy    = 1.0 / (PetscReal)(my - 1);
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
  PetscCall(DMDAVecGetArray(da, b, &array));
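  /* Fill the local part of the right-hand side: Dirichlet boundary rows carry the boundary
     value scaled by the matrix diagonal 2*(Hx/Hy + Hy/Hx), while interior (and Neumann) rows
     carry the Gaussian forcing term scaled by the cell area Hx*Hy. */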
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      if (user->bcType == DIRICHLET && (i == 0 || j == 0 || i == mx - 1 || j == my - 1)) {
        array[j][i] = PetscExpScalar(-((PetscReal)i * Hx) * ((PetscReal)i * Hx) / user->nu) * PetscExpScalar(-((PetscReal)j * Hy) * ((PetscReal)j * Hy) / user->nu) * 2.0 * (HxdHy + HydHx);
      } else {
        array[j][i] = PetscExpScalar(-((PetscReal)i * Hx) * ((PetscReal)i * Hx) / user->nu) * PetscExpScalar(-((PetscReal)j * Hy) * ((PetscReal)j * Hy) / user->nu) * Hx * Hy;
      }
    }
  }
  PetscCall(DMDAVecRestoreArray(da, b, &array));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));

  /* Force the right-hand side to be consistent for the singular (pure Neumann) matrix.
     Note this is really a hack; normally the model would provide a consistent right-hand side. */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
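
/* Variable coefficient: rho equals centerRho (set with -rho) inside the central third of the
   domain in each direction and 1 elsewhere. */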
PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  PetscFunctionBeginUser;
  if ((i > mx / 3.0) && (i < 2.0 * mx / 3.0) && (j > my / 3.0) && (j < 2.0 * my / 3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext *user = (UserContext *)ctx;
  PetscReal    centerRho;
  PetscInt     i, j, mx, my, xm, ym, xs, ys;
  PetscScalar  v[5];
  PetscReal    Hx, Hy, HydHx, HxdHy, rho;
  MatStencil   row, col[5];
  DM           da;
  PetscBool    check_matis = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  centerRho = user->rho;
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx    = 1.0 / (PetscReal)(mx - 1);
  Hy    = 1.0 / (PetscReal)(my - 1);
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
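  /* Assemble the 5-point stencil row by row: Dirichlet boundary rows get only a diagonal
     entry, Neumann boundary rows keep the neighbors that exist and set the diagonal so the
     row sums to zero, and interior rows get the full stencil with the coefficient rho taken
     from ComputeRho(). */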
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;
      PetscCall(ComputeRho(i, j, mx, my, centerRho, &rho));
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0 * rho * (HxdHy + HydHx);
          PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j != 0) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j - 1;
            numy++;
            num++;
          }
          if (i != 0) {
            v[num]     = -rho * HydHx;
            col[num].i = i - 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (i != mx - 1) {
            v[num]     = -rho * HydHx;
            col[num].i = i + 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (j != my - 1) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j + 1;
            numy++;
            num++;
          }
          v[num]     = numx * rho * HydHx + numy * rho * HxdHy;
          col[num].i = i;
          col[num].j = j;
          num++;
          PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
        }
      } else {
        v[0]     = -rho * HxdHy;
        col[0].i = i;
        col[0].j = j - 1;
        v[1]     = -rho * HydHx;
        col[1].i = i - 1;
        col[1].j = j;
        v[2]     = 2.0 * rho * (HxdHy + HydHx);
        col[2].i = i;
        col[2].j = j;
        v[3]     = -rho * HydHx;
        col[3].i = i + 1;
        col[3].j = j;
        v[4]     = -rho * HxdHy;
        col[4].i = i;
        col[4].j = j + 1;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatViewFromOptions(jac, NULL, "-view_mat"));
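  /* Debug option: -check_matis converts the assembled matrix to MATIS and back, then reports
     the Frobenius norm of the difference as a consistency check on the conversion. */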
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_matis", &check_matis, NULL));
  if (check_matis) {
    void (*f)(void);
    Mat       J2;
    MatType   jtype;
    PetscReal nrm;

    PetscCall(MatGetType(jac, &jtype));
    PetscCall(MatConvert(jac, MATIS, MAT_INITIAL_MATRIX, &J2));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv"));
    PetscCall(MatConvert(J2, jtype, MAT_INPLACE_MATRIX, &J2));
    PetscCall(MatGetOperation(jac, MATOP_VIEW, &f));
    PetscCall(MatSetOperation(J2, MATOP_VIEW, f));
    PetscCall(MatSetDM(J2, da));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_assembled"));
    PetscCall(MatAXPY(J2, -1., jac, DIFFERENT_NONZERO_PATTERN));
    PetscCall(MatNorm(J2, NORM_FROBENIUS, &nrm));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error MATIS %g\n", (double)nrm));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_err"));
    PetscCall(MatDestroy(&J2));
  }
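  /* With pure Neumann boundary conditions the matrix is singular, with the constant vector in
     its null space; attach the null space so the Krylov solver can account for it. */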
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatSetNullSpace(J, nullspace));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -da_refine 8 -ksp_rtol 1.e-3

   test:
      suffix: 2
      args: -bc_type neumann -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -da_refine 8 -mg_coarse_pc_factor_shift_type nonzero
      requires: !single

   test:
      suffix: telescope
      nsize: 4
      args: -ksp_monitor_short -da_grid_x 257 -da_grid_y 257 -pc_type mg -pc_mg_galerkin pmat -pc_mg_levels 4 -ksp_type richardson -mg_levels_ksp_type chebyshev -mg_levels_pc_type jacobi -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_ignore_kspcomputeoperators -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_galerkin pmat -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_levels_ksp_type chebyshev -mg_coarse_telescope_mg_levels_pc_type jacobi -mg_coarse_pc_telescope_reduction_factor 4

   test:
      suffix: 3
      args: -ksp_view -da_refine 2 -pc_type mg -pc_mg_distinct_smoothup -mg_levels_up_pc_type jacobi

   test:
      suffix: 4
      args: -ksp_view -da_refine 2 -pc_type mg -pc_mg_distinct_smoothup -mg_levels_up_ksp_max_it 3 -mg_levels_ksp_max_it 4

   testset:
      suffix: aniso
      args: -da_grid_x 10 -da_grid_y 2 -da_refine 2 -pc_type mg -ksp_monitor_short -mg_levels_ksp_max_it 6 -mg_levels_pc_type jacobi
      test:
         suffix: first
         args: -mg_levels_ksp_chebyshev_kind first
      test:
         suffix: fourth
         args: -mg_levels_ksp_chebyshev_kind fourth
      test:
         suffix: opt_fourth
         args: -mg_levels_ksp_chebyshev_kind opt_fourth

   test:
      suffix: 5
      nsize: 2
      requires: hypre !complex
      args: -pc_type mg -da_refine 2 -ksp_monitor -matptap_via hypre -pc_mg_galerkin both

   test:
      suffix: 6
      args: -pc_type svd -pc_svd_monitor ::all

TEST*/