Actual source code: dmproject.c
#include <petsc/private/dmimpl.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmplex.h>
#include <petscdmswarm.h>
#include <petscksp.h>
#include <petscblaslapack.h>

#include <petsc/private/dmswarmimpl.h>
#include "../src/dm/impls/swarm/data_bucket.h" // For DataBucket internals

typedef struct _projectConstraintsCtx {
  DM  dm;
  Vec mask;
} projectConstraintsCtx;

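/* Apply the normal-equations operator for DMGlobalToLocalSolve(): if L is the
   global-to-local map and D the optional diagonal mask over local points, this
   computes y = L^T D L x, using that local-to-global with ADD_VALUES is L^T. */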
static PetscErrorCode MatMult_GlobalToLocalNormal(Mat CtC, Vec x, Vec y)
{
  DM                     dm;
  Vec                    local, mask;
  projectConstraintsCtx *ctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(CtC, &ctx));
  dm   = ctx->dm;
  mask = ctx->mask;
  PetscCall(DMGetLocalVector(dm, &local));
  PetscCall(DMGlobalToLocalBegin(dm, x, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(dm, x, INSERT_VALUES, local));
  if (mask) PetscCall(VecPointwiseMult(local, mask, local));
  PetscCall(VecSet(y, 0.));
  PetscCall(DMLocalToGlobalBegin(dm, local, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(dm, local, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(dm, &local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalSolve_project1(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx)
{
  PetscInt f;

  PetscFunctionBegin;
  for (f = 0; f < Nf; f++) u[f] = 1.;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMGlobalToLocalSolve - Solve for the global vector that is mapped to a given local vector by `DMGlobalToLocalBegin()`/`DMGlobalToLocalEnd()` with mode
  `INSERT_VALUES`.

  Collective

  Input Parameters:
+ dm - The `DM` object
. x  - The local vector
- y  - The global vector: the input value of `y` is used as an initial guess

  Output Parameter:
. y - The least-squares solution

  Level: advanced

  Note:
  It is assumed that the sum of all the local vector sizes is greater than or equal to the global vector size, so the solution is
  a least-squares solution. It is also assumed that `DMLocalToGlobalBegin()`/`DMLocalToGlobalEnd()` with mode `ADD_VALUES` is the adjoint of the
  global-to-local map L, so that the least-squares solution may be found by solving the normal equations.

  If the `DM` is of type `DMPLEX`, then `y` is the solution of L' * D * L * y = L' * D * x, where D is a diagonal mask that is 1 for every point in
  the union of the closures of the local cells and 0 otherwise. This difference is only relevant if there are anchor points that are not in the
  closure of any local cell (see `DMPlexGetAnchors()`/`DMPlexSetAnchors()`).

  Developer Note:
  Since this solves for a global vector from a local one, should it not be called `DMLocalToGlobalSolve()`?

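  Example Usage:
  A minimal sketch (assuming `dm` is a fully set up `DM`): recover a global vector `g` consistent with a local vector `l`.
.vb
  Vec l, g;

  PetscCall(DMGetLocalVector(dm, &l));
  PetscCall(DMGetGlobalVector(dm, &g));
  // ... fill l, e.g. by evaluating a function on the local patch ...
  PetscCall(VecSet(g, 0.0)); // initial guess for the least-squares solve
  PetscCall(DMGlobalToLocalSolve(dm, l, g));
  PetscCall(DMRestoreLocalVector(dm, &l));
  PetscCall(DMRestoreGlobalVector(dm, &g));
.ve
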
.seealso: [](ch_ksp), `DM`, `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToGlobalEnd()`, `DMPlexGetAnchors()`, `DMPlexSetAnchors()`
@*/
PetscErrorCode DMGlobalToLocalSolve(DM dm, Vec x, Vec y)
{
  Mat                   CtC;
  PetscInt              n, N, cStart, cEnd, c;
  PetscBool             isPlex;
  KSP                   ksp;
  PC                    pc;
  Vec                   global, mask = NULL;
  projectConstraintsCtx ctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
  if (isPlex) {
    /* mark points in the closure */
    PetscCall(DMCreateLocalVector(dm, &mask));
    PetscCall(VecSet(mask, 0.0));
    PetscCall(DMPlexGetSimplexOrBoxCells(dm, 0, &cStart, &cEnd));
    if (cEnd > cStart) {
      PetscScalar *ones;
      PetscInt     numValues, i;

      PetscCall(DMPlexVecGetClosure(dm, NULL, mask, cStart, &numValues, NULL));
      PetscCall(PetscMalloc1(numValues, &ones));
      for (i = 0; i < numValues; i++) ones[i] = 1.;
      for (c = cStart; c < cEnd; c++) PetscCall(DMPlexVecSetClosure(dm, NULL, mask, c, ones, INSERT_VALUES));
      PetscCall(PetscFree(ones));
    }
  } else {
    PetscBool hasMask;

    PetscCall(DMHasNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &hasMask));
    if (!hasMask) {
      PetscErrorCode (**func)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
      void   **ctx;
      PetscInt Nf, f;

      PetscCall(DMGetNumFields(dm, &Nf));
      PetscCall(PetscMalloc2(Nf, &func, Nf, &ctx));
      for (f = 0; f < Nf; ++f) {
        func[f] = DMGlobalToLocalSolve_project1;
        ctx[f]  = NULL;
      }
      PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(DMProjectFunctionLocal(dm, 0.0, func, ctx, INSERT_ALL_VALUES, mask));
      PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(PetscFree2(func, ctx));
    }
    PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }
  ctx.dm   = dm;
  ctx.mask = mask;
  PetscCall(VecGetSize(y, &N));
  PetscCall(VecGetLocalSize(y, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), &CtC));
  PetscCall(MatSetSizes(CtC, n, n, N, N));
  PetscCall(MatSetType(CtC, MATSHELL));
  PetscCall(MatSetUp(CtC));
  PetscCall(MatShellSetContext(CtC, &ctx));
  PetscCall(MatShellSetOperation(CtC, MATOP_MULT, (void (*)(void))MatMult_GlobalToLocalNormal));
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
  PetscCall(KSPSetOperators(ksp, CtC, CtC));
  PetscCall(KSPSetType(ksp, KSPCG));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  PetscCall(KSPSetUp(ksp));
  PetscCall(DMGetGlobalVector(dm, &global));
  PetscCall(VecSet(global, 0.));
  if (mask) PetscCall(VecPointwiseMult(x, mask, x));
  PetscCall(DMLocalToGlobalBegin(dm, x, ADD_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(dm, x, ADD_VALUES, global));
  PetscCall(KSPSolve(ksp, global, y));
  PetscCall(DMRestoreGlobalVector(dm, &global));
  /* clean up */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&CtC));
  if (isPlex) {
    PetscCall(VecDestroy(&mask));
  } else {
    PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMProjectField - This projects a given function of the input fields into the function space provided by a `DM`, putting the coefficients in a global vector.

  Collective

  Input Parameters:
+ dm    - The `DM`
. time  - The time
. U     - The input field vector
. funcs - The functions to evaluate, one per field
- mode  - The insertion mode for values

  Output Parameter:
. X - The output vector

  Calling sequence of `funcs`:
+ dim          - The spatial dimension
. Nf           - The number of input fields
. NfAux        - The number of input auxiliary fields
. uOff         - The offset of each field in u[]
. uOff_x       - The offset of each field in u_x[]
. u            - The field values at this point in space
. u_t          - The field time derivative at this point in space (or `NULL`)
. u_x          - The field derivatives at this point in space
. aOff         - The offset of each auxiliary field in a[]
. aOff_x       - The offset of each auxiliary field in a_x[]
. a            - The auxiliary field values at this point in space
. a_t          - The auxiliary field time derivative at this point in space (or `NULL`)
. a_x          - The auxiliary field derivatives at this point in space
. t            - The current time
. x            - The coordinates of this point
. numConstants - The number of constants
. constants    - The value of each constant
- f            - The value of the function at this point in space

  Level: advanced

  Note:
  There are three different `DM`s that potentially interact in this function. The output `DM`, dm, specifies the layout of the values calculated by funcs.
  The input `DM`, attached to U, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
  a subdomain. You can also output a different number of fields than the input, with different discretizations. Lastly, the auxiliary `DM`, attached to the
  auxiliary field vector (which is itself attached to dm), can also be different. It can have a different topology, number of fields, and discretizations.

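  Example Usage:
  A minimal sketch (assuming `dm` has at least one scalar field and `U` is its global solution vector): project the square of the first field component.
.vb
  static void square_f(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                       const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                       const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                       PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
  {
    f[0] = u[uOff[0]] * u[uOff[0]]; // square of field 0 at this point
  }

  // ... inside a function, with dm and U already set up ...
  void (*funcs[1])(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]) = {square_f};
  Vec X;

  PetscCall(DMGetGlobalVector(dm, &X));
  PetscCall(DMProjectField(dm, 0.0, U, funcs, INSERT_VALUES, X));
  // ... use X, then restore ...
  PetscCall(DMRestoreGlobalVector(dm, &X));
.ve
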
.seealso: [](ch_ksp), `DM`, `DMProjectFieldLocal()`, `DMProjectFieldLabelLocal()`, `DMProjectFunction()`, `DMComputeL2Diff()`
@*/
PetscErrorCode DMProjectField(DM dm, PetscReal time, Vec U, void (**funcs)(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[]), InsertMode mode, Vec X)
{
  Vec localX, localU;
  DM  dmIn;

  PetscFunctionBegin;

  PetscCall(DMGetLocalVector(dm, &localX));
  /* We currently check whether localU == localX to see if we need to apply BC */
  if (U != X) {
    PetscCall(VecGetDM(U, &dmIn));
    PetscCall(DMGetLocalVector(dmIn, &localU));
  } else {
    dmIn   = dm;
    localU = localX;
  }
  PetscCall(DMGlobalToLocalBegin(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMProjectFieldLocal(dm, time, localU, funcs, mode, localX));
  PetscCall(DMLocalToGlobalBegin(dm, localX, mode, X));
  PetscCall(DMLocalToGlobalEnd(dm, localX, mode, X));
  if (mode == INSERT_VALUES || mode == INSERT_ALL_VALUES || mode == INSERT_BC_VALUES) {
    Mat cMat;

    PetscCall(DMGetDefaultConstraints(dm, NULL, &cMat, NULL));
    if (cMat) PetscCall(DMGlobalToLocalSolve(dm, localX, X));
  }
  PetscCall(DMRestoreLocalVector(dm, &localX));
  if (U != X) PetscCall(DMRestoreLocalVector(dmIn, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/********************* Adaptive Interpolation **************************/

/* See the discussion of Adaptive Interpolation in manual/high_level_mg.rst */
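/*
  Sketch of the algorithm as implemented below: for each fine row r of the interpolator In,
  solve the weighted least-squares problem

    min_p  sum_k w_k | f^{F,k}_r - sum_c p_c f^{C,k}_{cols[c]} |^2

  over the nonzero columns cols[] of row r, where f^{F,k} and f^{C,k} are the k-th fine and
  coarse test vectors (columns of MF and MC) and w_k is an inverse-Rayleigh-quotient weight
  computed from the smoother's operator. The solution p replaces row r in InAdapt.
*/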
PetscErrorCode DMAdaptInterpolator(DM dmc, DM dmf, Mat In, KSP smoother, Mat MF, Mat MC, Mat *InAdapt, void *user)
{
  Mat                globalA, AF;
  Vec                tmp;
  const PetscScalar *af, *ac;
  PetscScalar       *A, *b, *x, *workscalar;
  PetscReal         *w, *sing, *workreal, rcond = PETSC_SMALL;
  PetscBLASInt       M, N, one = 1, irank, lwrk, info;
  PetscInt           debug = 0, rStart, rEnd, r, maxcols = 0, k, Nc, ldac, ldaf;
  PetscBool          allocVc = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dm_interpolator_adapt_debug", &debug, NULL));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  PetscCall(MatDuplicate(In, MAT_SHARE_NONZERO_PATTERN, InAdapt));
  PetscCall(MatGetOwnershipRange(In, &rStart, &rEnd));
#if 0
  PetscCall(MatGetMaxRowLen(In, &maxcols));
#else
  for (r = rStart; r < rEnd; ++r) {
    PetscInt ncols;

    PetscCall(MatGetRow(In, r, &ncols, NULL, NULL));
    maxcols = PetscMax(maxcols, ncols);
    PetscCall(MatRestoreRow(In, r, &ncols, NULL, NULL));
  }
#endif
  if (Nc < maxcols) PetscCall(PetscPrintf(PETSC_COMM_SELF, "The number of input vectors %" PetscInt_FMT " is less than the maximum number of column entries %" PetscInt_FMT "\n", Nc, maxcols));
  for (k = 0; k < Nc && debug; ++k) {
    char        name[PETSC_MAX_PATH_LEN];
    const char *prefix;
    Vec         vc, vf;

    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)smoother, &prefix));

    if (MC) {
      PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sCoarse Vector %" PetscInt_FMT, prefix ? prefix : "", k));
      PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
      PetscCall(PetscObjectSetName((PetscObject)vc, name));
      PetscCall(VecViewFromOptions(vc, NULL, "-dm_adapt_interp_view_coarse"));
      PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    }
    PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sFine Vector %" PetscInt_FMT, prefix ? prefix : "", k));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(PetscObjectSetName((PetscObject)vf, name));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_adapt_interp_view_fine"));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(PetscBLASIntCast(3 * PetscMin(Nc, maxcols) + PetscMax(2 * PetscMin(Nc, maxcols), PetscMax(Nc, maxcols)), &lwrk));
  PetscCall(PetscMalloc7(Nc * maxcols, &A, PetscMax(Nc, maxcols), &b, Nc, &w, maxcols, &x, maxcols, &sing, lwrk, &workscalar, 5 * PetscMin(Nc, maxcols), &workreal));
  /* w_k = \frac{\HC{v_k} B_l v_k}{\HC{v_k} A_l v_k} or the inverse Rayleigh quotient, which we calculate using \frac{\HC{v_k} v_k}{\HC{v_k} B^{-1}_l A_l v_k} */
  PetscCall(KSPGetOperators(smoother, &globalA, NULL));

  PetscCall(MatMatMult(globalA, MF, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &AF));
  for (k = 0; k < Nc; ++k) {
    PetscScalar vnorm, vAnorm;
    Vec         vf;

    w[k] = 1.0;
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseGetColumnVecRead(AF, k, &tmp));
    PetscCall(VecDot(vf, vf, &vnorm));
#if 0
    PetscCall(DMGetGlobalVector(dmf, &tmp2));
    PetscCall(KSPSolve(smoother, tmp, tmp2));
    PetscCall(VecDot(vf, tmp2, &vAnorm));
    PetscCall(DMRestoreGlobalVector(dmf, &tmp2));
#else
    PetscCall(VecDot(vf, tmp, &vAnorm));
#endif
    w[k] = PetscRealPart(vnorm) / PetscRealPart(vAnorm);
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseRestoreColumnVecRead(AF, k, &tmp));
  }
  PetscCall(MatDestroy(&AF));
  if (!MC) {
    allocVc = PETSC_TRUE;
    PetscCall(MatTransposeMatMult(In, MF, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &MC));
  }
  /* Solve a LS system for each fine row
     MATT: Can we generalize to the case where Nc for the fine space
     is different from Nc for the coarse? */
  PetscCall(MatDenseGetArrayRead(MF, &af));
  PetscCall(MatDenseGetLDA(MF, &ldaf));
  PetscCall(MatDenseGetArrayRead(MC, &ac));
  PetscCall(MatDenseGetLDA(MC, &ldac));
  for (r = rStart; r < rEnd; ++r) {
    PetscInt           ncols, c;
    const PetscInt    *cols;
    const PetscScalar *vals;

    PetscCall(MatGetRow(In, r, &ncols, &cols, &vals));
    for (k = 0; k < Nc; ++k) {
      /* Need to fit lowest mode exactly */
      const PetscReal wk = ((ncols == 1) && (k > 0)) ? 0.0 : PetscSqrtReal(w[k]);

      /* b_k = \sqrt{w_k} f^{F,k}_r */
      b[k] = wk * af[r - rStart + k * ldaf];
      /* A_{kc} = \sqrt{w_k} f^{C,k}_c */
      /* TODO Must pull out VecScatter from In, scatter in vc[k] values up front, and access them indirectly just as in MatMult() */
      for (c = 0; c < ncols; ++c) {
        /* This is element (k, c) of A */
        A[c * Nc + k] = wk * ac[cols[c] - rStart + k * ldac];
      }
    }
    PetscCall(PetscBLASIntCast(Nc, &M));
    PetscCall(PetscBLASIntCast(ncols, &N));
    if (debug) {
#if defined(PETSC_USE_COMPLEX)
      PetscScalar *tmp;
      PetscInt     j;

      PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      for (j = 0; j < Nc; ++j) tmp[j] = w[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, tmp));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      for (j = 0; j < Nc; ++j) tmp[j] = b[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, tmp));
      PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, w));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, b));
#endif
    }
#if defined(PETSC_USE_COMPLEX)
    /* ZGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, RWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, workreal, &info));
#else
    /* DGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, &info));
#endif
    PetscCheck(info >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Bad argument to GELSS");
    PetscCheck(info <= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "SVD failed to converge");
    if (debug) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscBLASInt_FMT " rcond %g\n", irank, (double)rcond));
#if defined(PETSC_USE_COMPLEX)
      {
        PetscScalar *tmp;
        PetscInt     j;

        PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
        for (j = 0; j < PetscMin(Nc, ncols); ++j) tmp[j] = sing[j];
        PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, tmp));
        PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      }
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, sing));
#endif
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS old P", ncols, 1, vals));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS sol", ncols, 1, b));
    }
    PetscCall(MatSetValues(*InAdapt, 1, &r, ncols, cols, b, INSERT_VALUES));
    PetscCall(MatRestoreRow(In, r, &ncols, &cols, &vals));
  }
  PetscCall(MatDenseRestoreArrayRead(MF, &af));
  PetscCall(MatDenseRestoreArrayRead(MC, &ac));
  PetscCall(PetscFree7(A, b, w, x, sing, workscalar, workreal));
  if (allocVc) PetscCall(MatDestroy(&MC));
  PetscCall(MatAssemblyBegin(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogEventEnd(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

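/* Verify an interpolator In against test vectors: for each column k of MC and MF,
   compute the residual In * vc_k - vf_k and require that the maximum 2-norm over
   all columns stays below tol. */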
PetscErrorCode DMCheckInterpolator(DM dmf, Mat In, Mat MC, Mat MF, PetscReal tol)
{
  Vec       tmp;
  PetscReal norminf, norm2, maxnorminf = 0.0, maxnorm2 = 0.0;
  PetscInt  k, Nc;

  PetscFunctionBegin;
  PetscCall(DMGetGlobalVector(dmf, &tmp));
  PetscCall(MatViewFromOptions(In, NULL, "-dm_interpolator_adapt_error"));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  for (k = 0; k < Nc; ++k) {
    Vec vc, vf;

    PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatMult(In, vc, tmp));
    PetscCall(VecAXPY(tmp, -1.0, vf));
    PetscCall(VecViewFromOptions(vc, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(tmp, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecNorm(tmp, NORM_INFINITY, &norminf));
    PetscCall(VecNorm(tmp, NORM_2, &norm2));
    maxnorminf = PetscMax(maxnorminf, norminf);
    maxnorm2   = PetscMax(maxnorm2, norm2);
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dmf), "Coarse vec %" PetscInt_FMT " ||vf - P vc||_\\infty %g, ||vf - P vc||_2 %g\n", k, (double)norminf, (double)norm2));
    PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(DMRestoreGlobalVector(dmf, &tmp));
  PetscCheck(maxnorm2 <= tol, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_WRONG, "max_k ||vf_k - P vc_k||_2 %g > tol %g", (double)maxnorm2, (double)tol);
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Project particles to field
//   M_f u_f = M_p u_p
//   u_f = M^{-1}_f M_p u_p
static PetscErrorCode DMSwarmProjectField_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  Mat         M_f, M_p; // TODO Should cache these
  Vec         rhs;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMultTranspose(M_p, u_p, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSetOperators(ksp, M_f, M_f));
  PetscCall(KSPSolve(ksp, rhs, u_f));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

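/* The particle-to-field solve above is controlled through the "ptof_" options prefix
   (appended to the swarm's own prefix). Any standard KSP/PC options are accepted under
   this prefix; one plausible command-line choice (a sketch, not a recommendation) is
     -ptof_ksp_type cg -ptof_pc_type jacobi
   since the finite element mass matrix M_f is symmetric positive definite. */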
// Project field to particles
//   M_p u_p = M_f u_f
//   u_p = M^+_p M_f u_f
static PetscErrorCode DMSwarmProjectParticles_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  PC          pc;
  Mat         M_f, M_p, PM_p;
  Vec         rhs;
  PetscBool   isBjacobi;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMultTranspose(M_f, u_f, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
  if (isBjacobi) {
    PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PM_p));
  } else {
    PM_p = M_p;
    PetscCall(PetscObjectReference((PetscObject)PM_p));
  }
  PetscCall(KSPSetOperators(ksp, M_p, PM_p));
  PetscCall(KSPSolveTranspose(ksp, rhs, u_p));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscCall(MatDestroy(&PM_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

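/* The field-to-particle solve above uses the "ftop_" options prefix. Because the
   particle mass matrix M_p is rectangular in general, the pseudoinverse M^+_p is
   applied via a transposed solve; a least-squares Krylov method is the natural fit,
   e.g. (a sketch; any KSP supporting KSPSolveTranspose() works under this prefix)
     -ftop_ksp_type lsqr -ftop_pc_type none */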
static PetscErrorCode DMSwarmProjectFields_Plex_Internal(DM sw, DM dm, PetscInt Nf, const char *fieldnames[], Vec vec, ScatterMode mode)
{
  Vec u;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(SwarmProjCitation, &SwarmProjcite));
  PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Currently supported only for a single field");
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fieldnames[0], &u));
  if (mode == SCATTER_FORWARD) {
    PetscCall(DMSwarmProjectField_Conservative_PLEX(sw, dm, u, vec));
  } else {
    PetscCall(DMSwarmProjectParticles_Conservative_PLEX(sw, dm, u, vec));
  }
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fieldnames[0], &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

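/* Approximate Q1 projection on a 2D DMDA: each particle deposits N_i(x_p) * phi_p onto
   the four vertices of its cell, the local contributions (both the weighted field and
   the sum of weights) are assembled globally with ADD_VALUES, and the vertex value is
   the ratio of the two, i.e. a locally weighted average of nearby particle values. */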
static PetscErrorCode DMSwarmProjectField_ApproxQ1_DA_2D(DM swarm, PetscReal *swarm_field, DM dm, Vec v_field)
{
  Vec                v_field_l, denom_l, coor_l, denom;
  PetscScalar       *_field_l, *_denom_l;
  PetscInt           k, p, e, npoints, nel, npe;
  PetscInt          *mpfield_cell;
  PetscReal         *mpfield_coor;
  const PetscInt    *element_list;
  const PetscInt    *element;
  PetscScalar        xi_p[2], Ni[4];
  const PetscScalar *_coor;

  PetscFunctionBegin;
  PetscCall(VecZeroEntries(v_field));

  PetscCall(DMGetLocalVector(dm, &v_field_l));
  PetscCall(DMGetGlobalVector(dm, &denom));
  PetscCall(DMGetLocalVector(dm, &denom_l));
  PetscCall(VecZeroEntries(v_field_l));
  PetscCall(VecZeroEntries(denom));
  PetscCall(VecZeroEntries(denom_l));

  PetscCall(VecGetArray(v_field_l, &_field_l));
  PetscCall(VecGetArray(denom_l, &_denom_l));

  PetscCall(DMGetCoordinatesLocal(dm, &coor_l));
  PetscCall(VecGetArrayRead(coor_l, &_coor));

  PetscCall(DMDAGetElements(dm, &nel, &npe, &element_list));
  PetscCall(DMSwarmGetLocalSize(swarm, &npoints));
  PetscCall(DMSwarmGetField(swarm, DMSwarmPICField_coor, NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMSwarmGetField(swarm, DMSwarmPICField_cellid, NULL, NULL, (void **)&mpfield_cell));

  for (p = 0; p < npoints; p++) {
    PetscReal         *coor_p;
    const PetscScalar *x0;
    const PetscScalar *x2;
    PetscScalar        dx[2];

    e       = mpfield_cell[p];
    coor_p  = &mpfield_coor[2 * p];
    element = &element_list[npe * e];

    /* compute local coordinates: (xp-x0)/dx = (xip+1)/2 */
    x0 = &_coor[2 * element[0]];
    x2 = &_coor[2 * element[2]];

    dx[0] = x2[0] - x0[0];
    dx[1] = x2[1] - x0[1];

    xi_p[0] = 2.0 * (coor_p[0] - x0[0]) / dx[0] - 1.0;
    xi_p[1] = 2.0 * (coor_p[1] - x0[1]) / dx[1] - 1.0;

    /* evaluate basis functions */
    Ni[0] = 0.25 * (1.0 - xi_p[0]) * (1.0 - xi_p[1]);
    Ni[1] = 0.25 * (1.0 + xi_p[0]) * (1.0 - xi_p[1]);
    Ni[2] = 0.25 * (1.0 + xi_p[0]) * (1.0 + xi_p[1]);
    Ni[3] = 0.25 * (1.0 - xi_p[0]) * (1.0 + xi_p[1]);

    for (k = 0; k < npe; k++) {
      _field_l[element[k]] += Ni[k] * swarm_field[p];
      _denom_l[element[k]] += Ni[k];
    }
  }

  PetscCall(DMSwarmRestoreField(swarm, DMSwarmPICField_cellid, NULL, NULL, (void **)&mpfield_cell));
  PetscCall(DMSwarmRestoreField(swarm, DMSwarmPICField_coor, NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMDARestoreElements(dm, &nel, &npe, &element_list));
  PetscCall(VecRestoreArrayRead(coor_l, &_coor));
  PetscCall(VecRestoreArray(v_field_l, &_field_l));
  PetscCall(VecRestoreArray(denom_l, &_denom_l));

  PetscCall(DMLocalToGlobalBegin(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalEnd(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalBegin(dm, denom_l, ADD_VALUES, denom));
  PetscCall(DMLocalToGlobalEnd(dm, denom_l, ADD_VALUES, denom));

  PetscCall(VecPointwiseDivide(v_field, v_field, denom));

  PetscCall(DMRestoreLocalVector(dm, &v_field_l));
  PetscCall(DMRestoreLocalVector(dm, &denom_l));
  PetscCall(DMRestoreGlobalVector(dm, &denom));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSwarmProjectFields_DA_Internal(DM swarm, DM celldm, PetscInt nfields, DMSwarmDataField dfield[], Vec vecs[], ScatterMode mode)
{
  PetscInt        f, dim;
  DMDAElementType etype;

  PetscFunctionBegin;
  PetscCall(DMDAGetElementType(celldm, &etype));
  PetscCheck(etype != DMDA_ELEMENT_P1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Only Q1 DMDA supported");
  PetscCheck(mode == SCATTER_FORWARD, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Mapping the continuum to particles is not currently supported for DMDA");

  PetscCall(DMGetDimension(swarm, &dim));
  switch (dim) {
  case 2:
    for (f = 0; f < nfields; f++) {
      PetscReal *swarm_field;

      PetscCall(DMSwarmDataFieldGetEntries(dfield[f], (void **)&swarm_field));
      PetscCall(DMSwarmProjectField_ApproxQ1_DA_2D(swarm, swarm_field, celldm, vecs[f]));
    }
    break;
  case 3:
    SETERRQ(PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "No support for 3D");
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMSwarmProjectFields - Project a set of swarm fields onto the cell `DM`

  Collective

  Input Parameters:
+ dm         - the `DMSWARM`
. nfields    - the number of swarm fields to project
. fieldnames - the textual names of the swarm fields to project
. fields     - an array of `Vec`s of length nfields
- mode       - if `SCATTER_FORWARD` then map particles to the continuum, and if `SCATTER_REVERSE` map the continuum to particles

  Level: beginner

  Notes:
  Currently, there are two available projection methods. The first is conservative projection, used for a `DMPLEX` cell `DM`. The second is the averaging below, which is used for a `DMDA` cell `DM`
.vb
   phi_i = \sum_{p=0}^{np} N_i(x_p) phi_p dJ / \sum_{p=0}^{np} N_i(x_p) dJ
   where phi_p is the swarm field at point p,
         N_i() is the cell DM basis function at vertex i,
         dJ is the determinant of the cell Jacobian and
         phi_i is the projected vertex value of the field phi.
.ve

  The user is responsible for destroying both the array and the individual `Vec` objects. For the `DMPLEX` case, there is only a single vector, so the field layout in the `DMPLEX` must match the requested fields from the `DMSwarm`.

  For averaging projection, only swarm fields registered with data type `PETSC_REAL` can be projected onto the cell `DM`, and only swarm fields of block size = 1 can currently be projected.

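  Example Usage:
  A minimal sketch (assuming `sw` is a `DMSWARM` with a `DMPLEX` cell `DM` and a registered real field named "phi", and `rho` is a global vector on the cell `DM`):
.vb
  const char *names[] = {"phi"};
  Vec         vecs[]  = {rho};

  PetscCall(DMSwarmProjectFields(sw, 1, names, vecs, SCATTER_FORWARD));
.ve
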
.seealso: [](ch_ksp), `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType`
@*/
PetscErrorCode DMSwarmProjectFields(DM dm, PetscInt nfields, const char *fieldnames[], Vec fields[], ScatterMode mode)
{
  DM_Swarm         *swarm = (DM_Swarm *)dm->data;
  DMSwarmDataField *gfield;
  DM                celldm;
  PetscBool         isDA, isPlex;
  MPI_Comm          comm;

  PetscFunctionBegin;
  DMSWARMPICVALID(dm);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMSwarmGetCellDM(dm, &celldm));
  PetscCall(PetscObjectTypeCompare((PetscObject)celldm, DMDA, &isDA));
  PetscCall(PetscObjectTypeCompare((PetscObject)celldm, DMPLEX, &isPlex));
  PetscCall(PetscMalloc1(nfields, &gfield));
  for (PetscInt f = 0; f < nfields; ++f) PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldnames[f], &gfield[f]));

  if (isDA) {
    for (PetscInt f = 0; f < nfields; f++) {
      PetscCheck(gfield[f]->petsc_type == PETSC_REAL, comm, PETSC_ERR_SUP, "Projection only valid for fields using a data type = PETSC_REAL");
      PetscCheck(gfield[f]->bs == 1, comm, PETSC_ERR_SUP, "Projection only valid for fields with block size = 1");
    }
    PetscCall(DMSwarmProjectFields_DA_Internal(dm, celldm, nfields, gfield, fields, mode));
  } else if (isPlex) {
    PetscInt Nf;

    PetscCall(DMGetNumFields(celldm, &Nf));
    PetscCheck(Nf == nfields, comm, PETSC_ERR_ARG_WRONG, "Number of DM fields %" PetscInt_FMT " != number of requested Swarm fields %" PetscInt_FMT, Nf, nfields);
    PetscCall(DMSwarmProjectFields_Plex_Internal(dm, celldm, nfields, fieldnames, fields[0], mode));
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Only supported for cell DMs of type DMDA and DMPLEX");

  PetscCall(PetscFree(gfield));
  PetscFunctionReturn(PETSC_SUCCESS);
}