Actual source code: ex18.c

  1: static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";

  3: #include <petsc/private/dmpleximpl.h>
  4: /* List of test meshes

  6: Network
  7: -------
  8: Test 0 (2 ranks):

 10: network=0:
 11: ---------
 12:   cell 0   cell 1   cell 2          nCells-1       (edge)
 13: 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)

 15:   vertex distribution:
 16:     rank 0: 0 1
 17:     rank 1: 2 3 ... nCells
 18:   cell(edge) distribution:
 19:     rank 0: 0 1
 20:     rank 1: 2 ... nCells-1

 22: network=1:
 23: ---------
 24:                v2
 25:                 ^
 26:                 |
 27:                cell 2
 28:                 |
 29:  v0 --cell 0--> v3--cell 1--> v1

 31:   vertex distribution:
 32:     rank 0: 0 1 3
 33:     rank 1: 2
 34:   cell(edge) distribution:
 35:     rank 0: 0 1
 36:     rank 1: 2

 38:   example:
 39:     mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
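       a network=1 run can be selected with -network_case (read in CreateMesh_1D()), e.g.:
         mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -network_case 1 -orig_dm_view -dist_dm_view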

 41: Triangle
 42: --------
 43: Test 0 (2 ranks):
 44: Two triangles sharing a face

 46:         2
 47:       / | \
 48:      /  |  \
 49:     /   |   \
 50:    0  0 | 1  3
 51:     \   |   /
 52:      \  |  /
 53:       \ | /
 54:         1

 56:   vertex distribution:
 57:     rank 0: 0 1
 58:     rank 1: 2 3
 59:   cell distribution:
 60:     rank 0: 0
 61:     rank 1: 1

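       example (2 ranks; relies on the defaults -dim 2 -cell_simplex 1 -testnum 0):
         mpiexec -n 2 ./ex18 -interpolate after_create -dm_view
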
 63: Test 1 (3 ranks):
 64: Four triangles partitioned across 3 ranks

 66:    0 _______ 3
 67:    | \     / |
 68:    |  \ 1 /  |
 69:    |   \ /   |
 70:    | 0  2  2 |
 71:    |   / \   |
 72:    |  / 3 \  |
 73:    | /     \ |
 74:    1 ------- 4

 76:   vertex distribution:
 77:     rank 0: 0 1
 78:     rank 1: 2 3
 79:     rank 2: 4
 80:   cell distribution:
 81:     rank 0: 0
 82:     rank 1: 1
 83:     rank 2: 2 3

 85: Test 2 (3 ranks):
 86: Four triangles partitioned across 3 ranks

 88:    1 _______ 3
 89:    | \     / |
 90:    |  \ 1 /  |
 91:    |   \ /   |
 92:    | 0  0  2 |
 93:    |   / \   |
 94:    |  / 3 \  |
 95:    | /     \ |
 96:    2 ------- 4

 98:   vertex distribution:
 99:     rank 0: 0 1
100:     rank 1: 2 3
101:     rank 2: 4
102:   cell distribution:
103:     rank 0: 0
104:     rank 1: 1
105:     rank 2: 2 3

107: Tetrahedron
108: -----------
109: Test 0:
110: Two tets sharing a face

112:  cell   3 _______    cell
113:  0    / | \      \   1
114:      /  |  \      \
115:     /   |   \      \
116:    0----|----4-----2
117:     \   |   /      /
118:      \  |  /      /
119:       \ | /      /
120:         1-------
121:    y
122:    | x
123:    |/
124:    *----z

126:   vertex distribution:
127:     rank 0: 0 1
128:     rank 1: 2 3 4
129:   cell distribution:
130:     rank 0: 0
131:     rank 1: 1

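       example (2 ranks; -cell_simplex and -testnum keep their defaults 1 and 0):
         mpiexec -n 2 ./ex18 -dim 3 -interpolate after_create -dm_view
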
133: Quadrilateral
134: -------------
135: Test 0 (2 ranks):
136: Two quads sharing a face

138:    3-------2-------5
139:    |       |       |
140:    |   0   |   1   |
141:    |       |       |
142:    0-------1-------4

144:   vertex distribution:
145:     rank 0: 0 1 2
146:     rank 1: 3 4 5
147:   cell distribution:
148:     rank 0: 0
149:     rank 1: 1

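       example (2 ranks; -cell_simplex 0 selects quads instead of simplices):
         mpiexec -n 2 ./ex18 -dim 2 -cell_simplex 0 -interpolate after_create -dm_view
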
151: TODO Test 1:
152: A quad and a triangle sharing a face

154:    5-------4
155:    |       | \
156:    |   0   |  \
157:    |       | 1 \
158:    2-------3----6

160: Hexahedron
161: ----------
162: Test 0 (2 ranks):
163: Two hexes sharing a face

165: cell   7-------------6-------------11 cell
166: 0     /|            /|            /|     1
167:      / |   F1      / |   F7      / |
168:     /  |          /  |          /  |
169:    4-------------5-------------10  |
170:    |   |     F4  |   |     F10 |   |
171:    |   |         |   |         |   |
172:    |F5 |         |F3 |         |F9 |
173:    |   |  F2     |   |   F8    |   |
174:    |   3---------|---2---------|---9
175:    |  /          |  /          |  /
176:    | /   F0      | /    F6     | /
177:    |/            |/            |/
178:    0-------------1-------------8

180:   vertex distribution:
181:     rank 0: 0 1 2 3 4 5
182:     rank 1: 6 7 8 9 10 11
183:   cell distribution:
184:     rank 0: 0
185:     rank 1: 1

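       example (2 ranks; -cell_simplex 0 selects hexes instead of simplices):
         mpiexec -n 2 ./ex18 -dim 3 -cell_simplex 0 -interpolate after_create -dm_view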
187: */

189: typedef enum {
190:   NONE,
191:   CREATE,
192:   AFTER_CREATE,
193:   AFTER_DISTRIBUTE
194: } InterpType;

196: typedef struct {
197:   PetscInt    debug;        /* The debugging level */
198:   PetscInt    testNum;      /* Indicates the mesh to create */
199:   PetscInt    dim;          /* The topological mesh dimension */
200:   PetscBool   cellSimplex;  /* Use simplices or hexes */
201:   PetscBool   distribute;   /* Distribute the mesh */
202:   InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
203:   PetscBool   useGenerator; /* Construct mesh with a mesh generator */
204:   PetscBool   testOrientIF; /* Test for different original interface orientations */
205:   PetscBool   testHeavy;    /* Run the heavy PointSF test */
206:   PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
207:   PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
208:   PetscInt    faces[3];     /* Number of faces per dimension for generator */
209:   PetscScalar coords[128];
210:   PetscReal   coordsTol;
211:   PetscInt    ncoords;
212:   PetscInt    pointsToExpand[128];
213:   PetscInt    nPointsToExpand;
214:   PetscBool   testExpandPointsEmpty;
215:   char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
216: } AppCtx;

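      /* Distribution-independent description of the whole-domain boundary: vertex coordinates of the
         recursively expanded boundary cones plus one PetscSection per depth level, replicated on every
         rank so the boundary faces can be re-identified after redistribution (see
         DMPlexGetExpandedBoundary_Private() and DMPlexExpandedVerticesCoordinatesToFaces_Private()). */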
218: struct _n_PortableBoundary {
219:   Vec           coordinates;
220:   PetscInt      depth;
221:   PetscSection *sections;
222: };
223: typedef struct _n_PortableBoundary *PortableBoundary;

225: static PetscLogStage stage[3];

227: static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
228: static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
229: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
230: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);

232: static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
233: {
234:   PetscInt d;

236:   PetscFunctionBegin;
237:   if (!*bnd) PetscFunctionReturn(PETSC_SUCCESS);
238:   PetscCall(VecDestroy(&(*bnd)->coordinates));
239:   for (d = 0; d < (*bnd)->depth; d++) PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
240:   PetscCall(PetscFree((*bnd)->sections));
241:   PetscCall(PetscFree(*bnd));
242:   PetscFunctionReturn(PETSC_SUCCESS);
243: }

245: static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
246: {
247:   const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
248:   PetscInt    interp         = NONE, dim;
249:   PetscBool   flg1, flg2;

251:   PetscFunctionBegin;
252:   options->debug                 = 0;
253:   options->testNum               = 0;
254:   options->dim                   = 2;
255:   options->cellSimplex           = PETSC_TRUE;
256:   options->distribute            = PETSC_FALSE;
257:   options->interpolate           = NONE;
258:   options->useGenerator          = PETSC_FALSE;
259:   options->testOrientIF          = PETSC_FALSE;
260:   options->testHeavy             = PETSC_TRUE;
261:   options->customView            = PETSC_FALSE;
262:   options->testExpandPointsEmpty = PETSC_FALSE;
263:   options->ornt[0]               = 0;
264:   options->ornt[1]               = 0;
265:   options->faces[0]              = 2;
266:   options->faces[1]              = 2;
267:   options->faces[2]              = 2;
268:   options->filename[0]           = '\0';
269:   options->coordsTol             = PETSC_DEFAULT;

271:   PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
272:   PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
273:   PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
274:   PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
275:   PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
276:   PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
277:   options->interpolate = (InterpType)interp;
278:   PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute needs -distribute 1");
279:   PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
280:   options->ncoords = 128;
281:   PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
282:   PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
283:   options->nPointsToExpand = 128;
284:   PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
285:   if (options->nPointsToExpand) PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
286:   PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
287:   PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
288:   PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
289:   dim = 3;
290:   PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
291:   if (flg2) {
292:     PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
293:     options->dim = dim;
294:   }
295:   PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
296:   PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
297:   PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
298:   PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "either none or both of -rotate_interface_0 and -rotate_interface_1 must be set");
299:   if (options->testOrientIF) {
300:     PetscInt i;
301:     for (i = 0; i < 2; i++) {
302:       if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
303:     }
304:     options->filename[0]  = 0;
305:     options->useGenerator = PETSC_FALSE;
306:     options->dim          = 3;
307:     options->cellSimplex  = PETSC_TRUE;
308:     options->interpolate  = CREATE;
309:     options->distribute   = PETSC_FALSE;
310:   }
311:   PetscOptionsEnd();
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }

315: static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
316: {
317:   PetscInt    testNum = user->testNum;
318:   PetscMPIInt rank, size;
319:   PetscInt    numCorners = 2, i;
320:   PetscInt    numCells, numVertices, network;
321:   PetscInt   *cells;
322:   PetscReal  *coords;

324:   PetscFunctionBegin;
325:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
326:   PetscCallMPI(MPI_Comm_size(comm, &size));
327:   PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);

329:   numCells = 3;
330:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
331:   PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must be >= 3", numCells);

333:   if (size == 1) {
334:     numVertices = numCells + 1;
335:     PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
336:     for (i = 0; i < numCells; i++) {
337:       cells[2 * i]      = i;
338:       cells[2 * i + 1]  = i + 1;
339:       coords[2 * i]     = i;
340:       coords[2 * i + 1] = i + 1;
341:     }

343:     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
344:     PetscCall(PetscFree2(cells, coords));
345:     PetscFunctionReturn(PETSC_SUCCESS);
346:   }

348:   network = 0;
349:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
350:   if (network == 0) {
351:     switch (rank) {
352:     case 0: {
353:       numCells    = 2;
354:       numVertices = numCells;
355:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
356:       cells[0]  = 0;
357:       cells[1]  = 1;
358:       cells[2]  = 1;
359:       cells[3]  = 2;
360:       coords[0] = 0.;
361:       coords[1] = 1.;
362:       coords[2] = 1.;
363:       coords[3] = 2.;
364:     } break;
365:     case 1: {
366:       numCells -= 2;
367:       numVertices = numCells + 1;
368:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
369:       for (i = 0; i < numCells; i++) {
370:         cells[2 * i]      = 2 + i;
371:         cells[2 * i + 1]  = 2 + i + 1;
372:         coords[2 * i]     = 2 + i;
373:         coords[2 * i + 1] = 2 + i + 1;
374:       }
375:     } break;
376:     default:
377:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
378:     }
379:   } else { /* network_case = 1 */
380:     /* ----------------------- */
381:     switch (rank) {
382:     case 0: {
383:       numCells    = 2;
384:       numVertices = 3;
385:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
386:       cells[0] = 0;
387:       cells[1] = 3;
388:       cells[2] = 3;
389:       cells[3] = 1;
390:     } break;
391:     case 1: {
392:       numCells    = 1;
393:       numVertices = 1;
394:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
395:       cells[0] = 3;
396:       cells[1] = 2;
397:     } break;
398:     default:
399:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
400:     }
401:   }
402:   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
403:   PetscCall(PetscFree2(cells, coords));
404:   PetscFunctionReturn(PETSC_SUCCESS);
405: }

407: static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
408: {
409:   PetscInt    testNum = user->testNum, p;
410:   PetscMPIInt rank, size;

412:   PetscFunctionBegin;
413:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
414:   PetscCallMPI(MPI_Comm_size(comm, &size));
415:   switch (testNum) {
416:   case 0:
417:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
418:     switch (rank) {
419:     case 0: {
420:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
421:       const PetscInt cells[3]        = {0, 1, 2};
422:       PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
423:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

425:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
426:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
427:     } break;
428:     case 1: {
429:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
430:       const PetscInt cells[3]        = {1, 3, 2};
431:       PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
432:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

434:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
435:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
436:     } break;
437:     default:
438:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
439:     }
440:     break;
441:   case 1:
442:     PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
443:     switch (rank) {
444:     case 0: {
445:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
446:       const PetscInt cells[3]        = {0, 1, 2};
447:       PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
448:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

450:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
451:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
452:     } break;
453:     case 1: {
454:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
455:       const PetscInt cells[3]        = {0, 2, 3};
456:       PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
457:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

459:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
460:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
461:     } break;
462:     case 2: {
463:       const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
464:       const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
465:       PetscReal      coords[2]        = {1.0, 0.0};
466:       PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

468:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
469:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
470:     } break;
471:     default:
472:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
473:     }
474:     break;
475:   case 2:
476:     PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
477:     switch (rank) {
478:     case 0: {
479:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
480:       const PetscInt cells[3]        = {1, 2, 0};
481:       PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
482:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

484:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
485:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
486:     } break;
487:     case 1: {
488:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
489:       const PetscInt cells[3]        = {1, 0, 3};
490:       PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
491:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

493:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
494:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
495:     } break;
496:     case 2: {
497:       const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
498:       const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
499:       PetscReal      coords[2]        = {1.0, 0.0};
500:       PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

502:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
503:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
504:     } break;
505:     default:
506:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
507:     }
508:     break;
509:   default:
510:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
511:   }
512:   PetscFunctionReturn(PETSC_SUCCESS);
513: }

515: static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
516: {
517:   PetscInt    testNum = user->testNum, p;
518:   PetscMPIInt rank, size;

520:   PetscFunctionBegin;
521:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
522:   PetscCallMPI(MPI_Comm_size(comm, &size));
523:   switch (testNum) {
524:   case 0:
525:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
526:     switch (rank) {
527:     case 0: {
528:       const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
529:       const PetscInt cells[4]        = {0, 2, 1, 3};
530:       PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
531:       PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

533:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
534:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
535:     } break;
536:     case 1: {
537:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
538:       const PetscInt cells[4]        = {1, 2, 4, 3};
539:       PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
540:       PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

542:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
543:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
544:     } break;
545:     default:
546:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
547:     }
548:     break;
549:   default:
550:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
551:   }
552:   if (user->testOrientIF) {
553:     PetscInt ifp[] = {8, 6};

555:     PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
556:     PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
557:     /* rotate interface face ifp[rank] by given orientation ornt[rank] */
558:     PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
559:     PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
560:     PetscCall(DMPlexCheckFaces(*dm, 0));
561:     PetscCall(DMPlexOrientInterface_Internal(*dm));
562:     PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
563:   }
564:   PetscFunctionReturn(PETSC_SUCCESS);
565: }

567: static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
568: {
569:   PetscInt    testNum = user->testNum, p;
570:   PetscMPIInt rank, size;

572:   PetscFunctionBegin;
573:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
574:   PetscCallMPI(MPI_Comm_size(comm, &size));
575:   switch (testNum) {
576:   case 0:
577:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
578:     switch (rank) {
579:     case 0: {
580:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
581:       const PetscInt cells[4]            = {0, 1, 2, 3};
582:       PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
583:       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

585:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
586:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
587:     } break;
588:     case 1: {
589:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
590:       const PetscInt cells[4]            = {1, 4, 5, 2};
591:       PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
592:       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

594:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
595:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
596:     } break;
597:     default:
598:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
599:     }
600:     break;
601:   default:
602:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
603:   }
604:   PetscFunctionReturn(PETSC_SUCCESS);
605: }

607: static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
608: {
609:   PetscInt    testNum = user->testNum, p;
610:   PetscMPIInt rank, size;

612:   PetscFunctionBegin;
613:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
614:   PetscCallMPI(MPI_Comm_size(comm, &size));
615:   switch (testNum) {
616:   case 0:
617:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
618:     switch (rank) {
619:     case 0: {
620:       const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
621:       const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
622:       PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
623:       PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

625:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
626:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
627:     } break;
628:     case 1: {
629:       const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
630:       const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
631:       PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
632:       PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

634:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
635:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
636:     } break;
637:     default:
638:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
639:     }
640:     break;
641:   default:
642:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
643:   }
644:   PetscFunctionReturn(PETSC_SUCCESS);
645: }

647: static PetscErrorCode CustomView(DM dm, PetscViewer v)
648: {
649:   DMPlexInterpolatedFlag interpolated;
650:   PetscBool              distributed;

652:   PetscFunctionBegin;
653:   PetscCall(DMPlexIsDistributed(dm, &distributed));
654:   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
655:   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
656:   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
657:   PetscFunctionReturn(PETSC_SUCCESS);
658: }

660: static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
661: {
662:   const char *filename     = user->filename;
663:   PetscBool   testHeavy    = user->testHeavy;
664:   PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
665:   PetscBool   distributed  = PETSC_FALSE;

667:   PetscFunctionBegin;
668:   *serialDM = NULL;
669:   if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
670:   PetscCall(PetscLogStagePush(stage[0]));
671:   PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
672:   PetscCall(PetscLogStagePop());
673:   if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
674:   PetscCall(DMPlexIsDistributed(*dm, &distributed));
675:   PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
676:   if (testHeavy && distributed) {
677:     PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
678:     PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
679:     PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
680:     PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
681:   }
682:   PetscCall(DMGetDimension(*dm, &user->dim));
683:   PetscFunctionReturn(PETSC_SUCCESS);
684: }

686: static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
687: {
688:   PetscPartitioner part;
689:   PortableBoundary boundary       = NULL;
690:   DM               serialDM       = NULL;
691:   PetscBool        cellSimplex    = user->cellSimplex;
692:   PetscBool        useGenerator   = user->useGenerator;
693:   PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
694:   PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
695:   PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
696:   PetscBool        testHeavy      = user->testHeavy;
697:   PetscMPIInt      rank;

699:   PetscFunctionBegin;
700:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
701:   if (user->filename[0]) {
702:     PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
703:   } else if (useGenerator) {
704:     PetscCall(PetscLogStagePush(stage[0]));
705:     PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
706:     PetscCall(PetscLogStagePop());
707:   } else {
708:     PetscCall(PetscLogStagePush(stage[0]));
709:     switch (user->dim) {
710:     case 1:
711:       PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
712:       break;
713:     case 2:
714:       if (cellSimplex) {
715:         PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
716:       } else {
717:         PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
718:       }
719:       break;
720:     case 3:
721:       if (cellSimplex) {
722:         PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
723:       } else {
724:         PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
725:       }
726:       break;
727:     default:
728:       SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
729:     }
730:     PetscCall(PetscLogStagePop());
731:   }
732:   PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisible by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
733:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
734:   PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

736:   if (interpSerial) {
737:     DM idm;

739:     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
740:     PetscCall(PetscLogStagePush(stage[2]));
741:     PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
742:     PetscCall(PetscLogStagePop());
743:     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
744:     PetscCall(DMDestroy(dm));
745:     *dm = idm;
746:     PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
747:     PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
748:   }

750:   /* Set partitioner options */
751:   PetscCall(DMPlexGetPartitioner(*dm, &part));
752:   if (part) {
753:     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
754:     PetscCall(PetscPartitionerSetFromOptions(part));
755:   }

757:   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
758:   if (testHeavy) {
759:     PetscBool distributed;

761:     PetscCall(DMPlexIsDistributed(*dm, &distributed));
762:     if (!serialDM && !distributed) {
763:       serialDM = *dm;
764:       PetscCall(PetscObjectReference((PetscObject)*dm));
765:     }
766:     if (serialDM) PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
767:     if (boundary) {
768:       /* check DM which has been created in parallel and already interpolated */
769:       PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
770:     }
771:     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
772:     PetscCall(DMPlexOrientInterface_Internal(*dm));
773:   }
774:   if (user->distribute) {
775:     DM pdm = NULL;

777:     /* Redistribute mesh over processes using that partitioner */
778:     PetscCall(PetscLogStagePush(stage[1]));
779:     PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
780:     PetscCall(PetscLogStagePop());
781:     if (pdm) {
782:       PetscCall(DMDestroy(dm));
783:       *dm = pdm;
784:       PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
785:       PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
786:     }

788:     if (interpParallel) {
789:       DM idm;

791:       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
792:       PetscCall(PetscLogStagePush(stage[2]));
793:       PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
794:       PetscCall(PetscLogStagePop());
795:       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
796:       PetscCall(DMDestroy(dm));
797:       *dm = idm;
798:       PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
799:       PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
800:     }
801:   }
802:   if (testHeavy) {
803:     if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
804:     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
805:     PetscCall(DMPlexOrientInterface_Internal(*dm));
806:   }

808:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
809:   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
810:   PetscCall(DMSetFromOptions(*dm));
811:   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

813:   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
814:   PetscCall(DMDestroy(&serialDM));
815:   PetscCall(PortableBoundaryDestroy(&boundary));
816:   PetscFunctionReturn(PETSC_SUCCESS);
817: }

819: #define ps2d(number) ((double)PetscRealPart(number))
820: static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
821: {
822:   PetscFunctionBegin;
823:   PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal to 3");
824:   if (tol >= 1e-3) {
825:     switch (dim) {
826:     case 1:
827:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0]))); break;
828:     case 2:
829:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]))); break;
830:     case 3:
831:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
832:     }
833:   } else {
834:     switch (dim) {
835:     case 1:
836:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0]))); break;
837:     case 2:
838:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]))); break;
839:     case 3:
840:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
841:     }
842:   }
843:   PetscFunctionReturn(PETSC_SUCCESS);
844: }

846: static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
847: {
848:   PetscInt           dim, i, npoints;
849:   IS                 pointsIS;
850:   const PetscInt    *points;
851:   const PetscScalar *coords;
852:   char               coordstr[128];
853:   MPI_Comm           comm;
854:   PetscMPIInt        rank;

856:   PetscFunctionBegin;
857:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
858:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
859:   PetscCall(DMGetDimension(dm, &dim));
860:   PetscCall(PetscViewerASCIIPushSynchronized(viewer));
861:   PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
862:   PetscCall(ISGetIndices(pointsIS, &points));
863:   PetscCall(ISGetLocalSize(pointsIS, &npoints));
864:   PetscCall(VecGetArrayRead(coordsVec, &coords));
865:   for (i = 0; i < npoints; i++) {
866:     PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
867:     if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
868:     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
869:     PetscCall(PetscViewerFlush(viewer));
870:   }
871:   PetscCall(PetscViewerASCIIPopSynchronized(viewer));
872:   PetscCall(VecRestoreArrayRead(coordsVec, &coords));
873:   PetscCall(ISRestoreIndices(pointsIS, &points));
874:   PetscCall(ISDestroy(&pointsIS));
875:   PetscFunctionReturn(PETSC_SUCCESS);
876: }

878: static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
879: {
880:   IS            is;
881:   PetscSection *sects;
882:   IS           *iss;
883:   PetscInt      d, depth;
884:   PetscMPIInt   rank;
885:   PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

887:   PetscFunctionBegin;
888:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
889:   if (user->testExpandPointsEmpty && rank == 0) {
890:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
891:   } else {
892:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
893:   }
894:   PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
895:   PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
896:   PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
897:   for (d = depth - 1; d >= 0; d--) {
898:     IS        checkIS;
899:     PetscBool flg;

901:     PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
902:     PetscCall(PetscSectionView(sects[d], sviewer));
903:     PetscCall(ISView(iss[d], sviewer));
904:     /* check reverse operation */
905:     if (d < depth - 1) {
906:       PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
907:       PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
908:       PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced a wrong IS");
909:       PetscCall(ISDestroy(&checkIS));
910:     }
911:   }
912:   PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
913:   PetscCall(PetscViewerFlush(viewer));
914:   PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
915:   PetscCall(ISDestroy(&is));
916:   PetscFunctionReturn(PETSC_SUCCESS);
917: }

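      /* Inverse of one level of DMPlexGetConeRecursive(): for every point of the section chart, take its
         expanded cone stored in 'is' and recover the single point it covers via DMPlexGetJoin(); cones of
         size 1 are passed through, -1 marks cones containing negative entries, -2 marks cones with no join. */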
919: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
920: {
921:   PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
922:   const PetscInt *coveredPoints;
923:   const PetscInt *arr, *cone;
924:   PetscInt       *newarr;

926:   PetscFunctionBegin;
927:   PetscCall(ISGetLocalSize(is, &n));
928:   PetscCall(PetscSectionGetStorageSize(section, &n1));
929:   PetscCall(PetscSectionGetChart(section, &start, &end));
930:   PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
931:   PetscCall(ISGetIndices(is, &arr));
932:   PetscCall(PetscMalloc1(end - start, &newarr));
933:   for (q = start; q < end; q++) {
934:     PetscCall(PetscSectionGetDof(section, q, &ncone));
935:     PetscCall(PetscSectionGetOffset(section, q, &o));
936:     cone = &arr[o];
937:     if (ncone == 1) {
938:       numCoveredPoints = 1;
939:       p                = cone[0];
940:     } else {
941:       PetscInt i;
942:       p = PETSC_MAX_INT;
943:       for (i = 0; i < ncone; i++)
944:         if (cone[i] < 0) {
945:           p = -1;
946:           break;
947:         }
948:       if (p >= 0) {
949:         PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
950:         PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered point for section point %" PetscInt_FMT, q);
951:         if (numCoveredPoints) p = coveredPoints[0];
952:         else p = -2;
953:         PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
954:       }
955:     }
956:     newarr[q - start] = p;
957:   }
958:   PetscCall(ISRestoreIndices(is, &arr));
959:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
960:   PetscFunctionReturn(PETSC_SUCCESS);
961: }

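      /* Convert an IS of expanded vertices (the deepest level returned by DMPlexGetConeRecursive()) back to
         the original (face) points by applying DMPlexExpandedConesToFaces_Private() once per depth level. */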
963: static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
964: {
965:   PetscInt d;
966:   IS       is, newis;

968:   PetscFunctionBegin;
969:   is = boundary_expanded_is;
970:   PetscCall(PetscObjectReference((PetscObject)is));
971:   for (d = 0; d < depth - 1; ++d) {
972:     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
973:     PetscCall(ISDestroy(&is));
974:     is = newis;
975:   }
976:   *boundary_is = is;
977:   PetscFunctionReturn(PETSC_SUCCESS);
978: }

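      /* On error, just release the reentrancy guard used below; the error code itself is deliberately swallowed. */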
980: #define CHKERRQI(incall, ierr) \
981:   if (ierr) incall = PETSC_FALSE;

983: static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
984: {
985:   PetscViewer       viewer;
986:   PetscBool         flg;
987:   static PetscBool  incall = PETSC_FALSE;
988:   PetscViewerFormat format;

990:   PetscFunctionBegin;
991:   if (incall) PetscFunctionReturn(PETSC_SUCCESS);
992:   incall = PETSC_TRUE;
993:   CHKERRQI(incall, PetscOptionsGetViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
994:   if (flg) {
995:     CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
996:     CHKERRQI(incall, DMLabelView(label, viewer));
997:     CHKERRQI(incall, PetscViewerPopFormat(viewer));
998:     CHKERRQI(incall, PetscViewerDestroy(&viewer));
999:   }
1000:   incall = PETSC_FALSE;
1001:   PetscFunctionReturn(PETSC_SUCCESS);
1002: }

1004: /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1005: static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1006: {
1007:   IS tmpis;

1009:   PetscFunctionBegin;
1010:   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1011:   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1012:   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1013:   PetscCall(ISDestroy(&tmpis));
1014:   PetscFunctionReturn(PETSC_SUCCESS);
1015: }

1017: /* currently only for simple PetscSection without fields or constraints */
1018: static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1019: {
1020:   PetscSection sec;
1021:   PetscInt     chart[2], p;
1022:   PetscInt    *dofarr;
1023:   PetscMPIInt  rank;

1025:   PetscFunctionBegin;
1026:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1027:   if (rank == rootrank) PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1028:   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1029:   PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
1030:   if (rank == rootrank) {
1031:     for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]));
1032:   }
1033:   PetscCallMPI(MPI_Bcast(dofarr, chart[1] - chart[0], MPIU_INT, rootrank, comm));
1034:   PetscCall(PetscSectionCreate(comm, &sec));
1035:   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1036:   for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]]));
1037:   PetscCall(PetscSectionSetUp(sec));
1038:   PetscCall(PetscFree(dofarr));
1039:   *secout = sec;
1040:   PetscFunctionReturn(PETSC_SUCCESS);
1041: }

1043: static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1044: {
1045:   IS faces_expanded_is;

1047:   PetscFunctionBegin;
1048:   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1049:   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1050:   PetscCall(ISDestroy(&faces_expanded_is));
1051:   PetscFunctionReturn(PETSC_SUCCESS);
1052: }

1054: /* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
1055: static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
1056: {
1057:   PetscOptions     options = NULL;
1058:   const char      *prefix  = NULL;
1059:   const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
1060:   char             prefix_opt[512];
1061:   PetscBool        flg, set;
1062:   static PetscBool wasSetTrue = PETSC_FALSE;

1064:   PetscFunctionBegin;
1065:   if (dm) {
1066:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1067:     options = ((PetscObject)dm)->options;
1068:   }
1069:   PetscCall(PetscStrncpy(prefix_opt, "-", sizeof(prefix_opt)));
1070:   PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
1071:   PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
1072:   PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1073:   if (!enable) {
1074:     if (set && flg) wasSetTrue = PETSC_TRUE;
1075:     PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
1076:   } else if (set && !flg) {
1077:     if (wasSetTrue) {
1078:       PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
1079:     } else {
1080:       /* default is PETSC_TRUE */
1081:       PetscCall(PetscOptionsClearValue(options, prefix_opt));
1082:     }
1083:     wasSetTrue = PETSC_FALSE;
1084:   }
1085:   if (PetscDefined(USE_DEBUG)) {
1086:     PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1087:     PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
1088:   }
1089:   PetscFunctionReturn(PETSC_SUCCESS);
1090: }

1092: /* get coordinate description of the whole-domain boundary */
1093: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
1094: {
1095:   PortableBoundary       bnd0, bnd;
1096:   MPI_Comm               comm;
1097:   DM                     idm;
1098:   DMLabel                label;
1099:   PetscInt               d;
1100:   const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
1101:   IS                     boundary_is;
1102:   IS                    *boundary_expanded_iss;
1103:   PetscMPIInt            rootrank = 0;
1104:   PetscMPIInt            rank, size;
1105:   PetscInt               value = 1;
1106:   DMPlexInterpolatedFlag intp;
1107:   PetscBool              flg;

1109:   PetscFunctionBegin;
1110:   PetscCall(PetscNew(&bnd));
1111:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1112:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1113:   PetscCallMPI(MPI_Comm_size(comm, &size));
1114:   PetscCall(DMPlexIsDistributed(dm, &flg));
1115:   PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

1117:   /* interpolate serial DM if not yet interpolated */
1118:   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1119:   if (intp == DMPLEX_INTERPOLATED_FULL) {
1120:     idm = dm;
1121:     PetscCall(PetscObjectReference((PetscObject)dm));
1122:   } else {
1123:     PetscCall(DMPlexInterpolate(dm, &idm));
1124:     PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
1125:   }

1127:   /* mark whole-domain boundary of the serial DM */
1128:   PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
1129:   PetscCall(DMAddLabel(idm, label));
1130:   PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
1131:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
1132:   PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

1134:   /* translate to coordinates */
1135:   PetscCall(PetscNew(&bnd0));
1136:   PetscCall(DMGetCoordinatesLocalSetUp(idm));
1137:   if (rank == rootrank) {
1138:     PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1139:     PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
1140:     /* self-check */
1141:     {
1142:       IS is0;
1143:       PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
1144:       PetscCall(ISEqual(is0, boundary_is, &flg));
1145:       PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
1146:       PetscCall(ISDestroy(&is0));
1147:     }
1148:   } else {
1149:     PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, 0, 0, &bnd0->coordinates));
1150:   }

1152:   {
1153:     Vec        tmp;
1154:     VecScatter sc;
1155:     IS         xis;
1156:     PetscInt   n;

1158:     /* just convert seq vectors to mpi vector */
1159:     PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
1160:     PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
1161:     if (rank == rootrank) {
1162:       PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n, &tmp));
1163:     } else {
1164:       PetscCall(VecCreateFromOptions(comm, NULL, 1, 0, n, &tmp));
1165:     }
1166:     PetscCall(VecCopy(bnd0->coordinates, tmp));
1167:     PetscCall(VecDestroy(&bnd0->coordinates));
1168:     bnd0->coordinates = tmp;

1170:     /* replicate coordinates from root rank to all ranks */
1171:     PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n * size, &bnd->coordinates));
1172:     PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
1173:     PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
1174:     PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1175:     PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1176:     PetscCall(VecScatterDestroy(&sc));
1177:     PetscCall(ISDestroy(&xis));
1178:   }
1179:   bnd->depth = bnd0->depth;
1180:   PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
1181:   PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
1182:   for (d = 0; d < bnd->depth; d++) PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));

1184:   if (rank == rootrank) PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1185:   PetscCall(PortableBoundaryDestroy(&bnd0));
1186:   PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
1187:   PetscCall(DMLabelDestroy(&label));
1188:   PetscCall(ISDestroy(&boundary_is));
1189:   PetscCall(DMDestroy(&idm));
1190:   *boundary = bnd;
1191:   PetscFunctionReturn(PETSC_SUCCESS);
1192: }

1194: /* get faces of inter-partition interface */
1195: static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1196: {
1197:   MPI_Comm               comm;
1198:   DMLabel                label;
1199:   IS                     part_boundary_faces_is;
1200:   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1201:   PetscInt               value              = 1;
1202:   DMPlexInterpolatedFlag intp;

1204:   PetscFunctionBegin;
1205:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1206:   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1207:   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

1209:   /* get ipdm partition boundary (partBoundary) */
1210:   {
1211:     PetscSF sf;

1213:     PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1214:     PetscCall(DMAddLabel(ipdm, label));
1215:     PetscCall(DMGetPointSF(ipdm, &sf));
1216:     PetscCall(DMSetPointSF(ipdm, NULL));
1217:     PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1218:     PetscCall(DMSetPointSF(ipdm, sf));
1219:     PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1220:     PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1221:     PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1222:     PetscCall(DMLabelDestroy(&label));
1223:   }

1225:   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1226:   PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
1227:   PetscCall(ISDestroy(&part_boundary_faces_is));
1228:   PetscFunctionReturn(PETSC_SUCCESS);
1229: }

1231: /* compute inter-partition interface including edges and vertices */
1232: static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
1233: {
1234:   DMLabel                label;
1235:   PetscInt               value           = 1;
1236:   const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
1237:   DMPlexInterpolatedFlag intp;
1238:   MPI_Comm               comm;

1240:   PetscFunctionBegin;
1241:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1242:   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1243:   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

1245:   PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
1246:   PetscCall(DMAddLabel(ipdm, label));
1247:   PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
1248:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
1249:   PetscCall(DMPlexLabelComplete(ipdm, label));
1250:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
1251:   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
1252:   PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
1253:   PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
1254:   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1255:   PetscCall(DMLabelDestroy(&label));
1256:   PetscFunctionReturn(PETSC_SUCCESS);
1257: }
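
/* The three helpers below gather the interface points covered by the PointSF on each rank.
   Roughly speaking: the SF leaves are local points owned by another rank ("outward"), while
   roots with nonzero degree are locally owned points referenced by leaves on other ranks
   ("inward"); PointSFGetInterfacePoints_Private() takes the union of the two sets. */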

1259: static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1260: {
1261:   PetscInt        n;
1262:   const PetscInt *arr;

1264:   PetscFunctionBegin;
1265:   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1266:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1267:   PetscFunctionReturn(PETSC_SUCCESS);
1268: }

1270: static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1271: {
1272:   PetscInt        n;
1273:   const PetscInt *rootdegree;
1274:   PetscInt       *arr;

1276:   PetscFunctionBegin;
1277:   PetscCall(PetscSFSetUp(sf));
1278:   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1279:   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1280:   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1281:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1282:   PetscFunctionReturn(PETSC_SUCCESS);
1283: }

1285: static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1286: {
1287:   IS pointSF_out_is, pointSF_in_is;

1289:   PetscFunctionBegin;
1290:   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1291:   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1292:   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1293:   PetscCall(ISDestroy(&pointSF_out_is));
1294:   PetscCall(ISDestroy(&pointSF_in_is));
1295:   PetscFunctionReturn(PETSC_SUCCESS);
1296: }

1298: #define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
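
/* The viewers below print each offending point with its type (vertex, edge, face, cell),
   the coordinates of vertices, and, recursively, its cone, so that PointSF mismatches
   reported by DMPlexComparePointSFWithInterface_Private() can be located in the mesh. */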

1300: static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
1301: {
1302:   DMLabel         label;
1303:   PetscSection    coordsSection;
1304:   Vec             coordsVec;
1305:   PetscScalar    *coordsScalar;
1306:   PetscInt        coneSize, depth, dim, i, p, npoints;
1307:   const PetscInt *points;

1309:   PetscFunctionBegin;
1310:   PetscCall(DMGetDimension(dm, &dim));
1311:   PetscCall(DMGetCoordinateSection(dm, &coordsSection));
1312:   PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
1313:   PetscCall(VecGetArray(coordsVec, &coordsScalar));
1314:   PetscCall(ISGetLocalSize(pointsIS, &npoints));
1315:   PetscCall(ISGetIndices(pointsIS, &points));
1316:   PetscCall(DMPlexGetDepthLabel(dm, &label));
1317:   PetscCall(PetscViewerASCIIPushTab(v));
1318:   for (i = 0; i < npoints; i++) {
1319:     p = points[i];
1320:     PetscCall(DMLabelGetValue(label, p, &depth));
1321:     if (!depth) {
1322:       PetscInt n, o;
1323:       char     coordstr[128];

1325:       PetscCall(PetscSectionGetDof(coordsSection, p, &n));
1326:       PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
1327:       PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
1328:       PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
1329:     } else {
1330:       char entityType[16];

1332:       switch (depth) {
1333:       case 1:
1334:         PetscCall(PetscStrncpy(entityType, "edge", sizeof(entityType)));
1335:         break;
1336:       case 2:
1337:         PetscCall(PetscStrncpy(entityType, "face", sizeof(entityType)));
1338:         break;
1339:       case 3:
1340:         PetscCall(PetscStrncpy(entityType, "cell", sizeof(entityType)));
1341:         break;
1342:       default:
1343:         SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
1344:       }
1345:       if (depth == dim && dim < 3) PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
1346:       PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
1347:     }
1348:     PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
1349:     if (coneSize) {
1350:       const PetscInt *cone;
1351:       IS              coneIS;

1353:       PetscCall(DMPlexGetCone(dm, p, &cone));
1354:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
1355:       PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
1356:       PetscCall(ISDestroy(&coneIS));
1357:     }
1358:   }
1359:   PetscCall(PetscViewerASCIIPopTab(v));
1360:   PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
1361:   PetscCall(ISRestoreIndices(pointsIS, &points));
1362:   PetscFunctionReturn(PETSC_SUCCESS);
1363: }

1365: static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1366: {
1367:   PetscBool   flg;
1368:   PetscInt    npoints;
1369:   PetscMPIInt rank;

1371:   PetscFunctionBegin;
1372:   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1373:   PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1374:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1375:   PetscCall(PetscViewerASCIIPushSynchronized(v));
1376:   PetscCall(ISGetLocalSize(points, &npoints));
1377:   if (npoints) {
1378:     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1379:     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1380:   }
1381:   PetscCall(PetscViewerFlush(v));
1382:   PetscCall(PetscViewerASCIIPopSynchronized(v));
1383:   PetscFunctionReturn(PETSC_SUCCESS);
1384: }

1386: static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
1387: {
1388:   PetscSF     pointsf;
1389:   IS          pointsf_is;
1390:   PetscBool   flg;
1391:   MPI_Comm    comm;
1392:   PetscMPIInt size;

1394:   PetscFunctionBegin;
1395:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1396:   PetscCallMPI(MPI_Comm_size(comm, &size));
1397:   PetscCall(DMGetPointSF(ipdm, &pointsf));
1398:   if (pointsf) {
1399:     PetscInt nroots;
1400:     PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
1401:     if (nroots < 0) pointsf = NULL; /* uninitialized SF */
1402:   }
1403:   if (!pointsf) {
1404:     PetscInt N = 0;
1405:     if (interface_is) PetscCall(ISGetSize(interface_is, &N));
1406:     PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty when PointSF is NULL");
1407:     PetscFunctionReturn(PETSC_SUCCESS);
1408:   }

1410:   /* get PointSF points as IS pointsf_is */
1411:   PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

1413:   /* compare pointsf_is with interface_is */
1414:   PetscCall(ISEqual(interface_is, pointsf_is, &flg));
1415:   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
1416:   if (!flg) {
1417:     IS          pointsf_extra_is, pointsf_missing_is;
1418:     PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
1419:     CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
1420:     CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
1421:     CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
1422:     CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
1423:     CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
1424:     CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
1425:     CHKERRMY(ISDestroy(&pointsf_extra_is));
1426:     CHKERRMY(ISDestroy(&pointsf_missing_is));
1427:     SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
1428:   }
1429:   PetscCall(ISDestroy(&pointsf_is));
1430:   PetscFunctionReturn(PETSC_SUCCESS);
1431: }

1433: /* remove faces & edges from the point IS, leave just vertices */
1434: static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1435: {
1436:   PetscInt vStart, vEnd;
1437:   MPI_Comm comm;

1439:   PetscFunctionBegin;
1440:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1441:   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1442:   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1443:   PetscFunctionReturn(PETSC_SUCCESS);
1444: }

1446: /*
1447:   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.

1449:   Collective

1451:   Input Parameter:
1452: . dm - The DMPlex object

1454:   Notes:
1455:   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1456:   This is a heavy test: it calls DMPlexInterpolate() if the input DM is not yet interpolated, and it requires a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (itself involving DMPlexInterpolate() on a sequential DM).
1457:   This is mainly intended for debugging/testing purposes.

1459:   Algorithm:
1460:   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1461:   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1462:   3. the mesh is distributed or loaded in parallel
1463:   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1464:   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1465:   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1466:   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error

1468:   Level: developer

1470: .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1471: */
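/* A minimal driver sketch for the steps above, assuming the mesh in dm was created serially
   and that DMPlexGetExpandedBoundary_Private() (referenced in the Notes) fills a
   PortableBoundary; the actual option handling in this example differs:

     PortableBoundary bnd;
     DM               dmdist;

     PetscCall(DMPlexGetExpandedBoundary_Private(dm, &bnd));   // steps 1-2: serial boundary -> coordinates
     PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));        // step 3: distribute the mesh
     if (dmdist) {
       PetscCall(DMDestroy(&dm));
       dm = dmdist;
     }
     PetscCall(DMPlexCheckPointSFHeavy(dm, bnd));              // steps 4-7: reconstruct boundary, compare with PointSF
     PetscCall(PortableBoundaryDestroy(&bnd));
*/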
1472: static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1473: {
1474:   DM                     ipdm = NULL;
1475:   IS                     boundary_faces_is, interface_faces_is, interface_is;
1476:   DMPlexInterpolatedFlag intp;
1477:   MPI_Comm               comm;

1479:   PetscFunctionBegin;
1480:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

1482:   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1483:   if (intp == DMPLEX_INTERPOLATED_FULL) {
1484:     ipdm = dm;
1485:   } else {
1486:     /* create temporary interpolated DM if input DM is not interpolated */
1487:     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1488:     PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1489:     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1490:   }
1491:   PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));

1493:   /* recover ipdm whole-domain boundary faces from the expanded vertex coordinates */
1494:   PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1495:   /* get inter-partition interface faces (interface_faces_is) */
1496:   PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1497:   /* compute inter-partition interface including edges and vertices (interface_is) */
1498:   PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1499:   /* destroy intermediate ISs */
1500:   PetscCall(ISDestroy(&boundary_faces_is));
1501:   PetscCall(ISDestroy(&interface_faces_is));

1503:   /* for uninterpolated case, keep just vertices in interface */
1504:   if (!intp) {
1505:     PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1506:     PetscCall(DMDestroy(&ipdm));
1507:   }

1509:   /* compare PointSF with the inter-partition interface computed from the reconstructed boundary */
1510:   PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1511:   PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1512:   PetscCall(ISDestroy(&interface_is));
1513:   PetscFunctionReturn(PETSC_SUCCESS);
1514: }

1516: int main(int argc, char **argv)
1517: {
1518:   DM     dm;
1519:   AppCtx user;

1521:   PetscFunctionBeginUser;
1522:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1523:   PetscCall(PetscLogStageRegister("create", &stage[0]));
1524:   PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1525:   PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1526:   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1527:   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1528:   if (user.nPointsToExpand) PetscCall(TestExpandPoints(dm, &user));
1529:   if (user.ncoords) {
1530:     Vec coords;

1532:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1533:     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1534:     PetscCall(VecDestroy(&coords));
1535:   }
1536:   PetscCall(DMDestroy(&dm));
1537:   PetscCall(PetscFinalize());
1538:   return 0;
1539: }
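
/* A hedged example invocation matching the first testset below (the test harness generates
   the actual commands from the TEST block):

     mpiexec -n 2 ./ex18 -distribute 1 -interpolate after_distribute -dm_plex_check_all -dm_view ascii::ascii_info_detail
*/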

1541: /*TEST

1543:   testset:
1544:     nsize: 2
1545:     args: -dm_view ascii::ascii_info_detail
1546:     args: -dm_plex_check_all
1547:     test:
1548:       suffix: 1_tri_dist0
1549:       args: -distribute 0 -interpolate {{none create}separate output}
1550:     test:
1551:       suffix: 1_tri_dist1
1552:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1553:     test:
1554:       suffix: 1_quad_dist0
1555:       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1556:     test:
1557:       suffix: 1_quad_dist1
1558:       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1559:     test:
1560:       suffix: 1_1d_dist1
1561:       args: -dim 1 -distribute 1

1563:   testset:
1564:     nsize: 3
1565:     args: -testnum 1 -interpolate create
1566:     args: -dm_plex_check_all
1567:     test:
1568:       suffix: 2
1569:       args: -dm_view ascii::ascii_info_detail
1570:     test:
1571:       suffix: 2a
1572:       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1573:     test:
1574:       suffix: 2b
1575:       args: -test_expand_points 0,1,2,5,6
1576:     test:
1577:       suffix: 2c
1578:       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty

1580:   testset:
1581:     # the same as testset 1 but for 3D
1582:     nsize: 2
1583:     args: -dim 3 -dm_view ascii::ascii_info_detail
1584:     args: -dm_plex_check_all
1585:     test:
1586:       suffix: 4_tet_dist0
1587:       args: -distribute 0 -interpolate {{none create}separate output}
1588:     test:
1589:       suffix: 4_tet_dist1
1590:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1591:     test:
1592:       suffix: 4_hex_dist0
1593:       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1594:     test:
1595:       suffix: 4_hex_dist1
1596:       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}

1598:   test:
1599:     # the same as 4_tet_dist0 but test different initial orientations
1600:     suffix: 4_tet_test_orient
1601:     nsize: 2
1602:     args: -dim 3 -distribute 0
1603:     args: -dm_plex_check_all
1604:     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1605:     args: -rotate_interface_1 {{0 1 2 11 12 13}}

1607:   testset:
1608:     requires: exodusii
1609:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1610:     args: -dm_view ascii::ascii_info_detail
1611:     args: -dm_plex_check_all
1612:     args: -custom_view
1613:     test:
1614:       suffix: 5_seq
1615:       nsize: 1
1616:       args: -distribute 0 -interpolate {{none create}separate output}
1617:     test:
1618:       # Detail viewing in a non-distributed mesh is broken because DMLabelView() is collective, but the label is not shared
1619:       suffix: 5_dist0
1620:       nsize: 2
1621:       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1622:     test:
1623:       suffix: 5_dist1
1624:       nsize: 2
1625:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}

1627:   testset:
1628:     nsize: {{1 2 4}}
1629:     args: -use_generator
1630:     args: -dm_plex_check_all
1631:     args: -distribute -interpolate none
1632:     test:
1633:       suffix: 6_tri
1634:       requires: triangle
1635:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1636:     test:
1637:       suffix: 6_quad
1638:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1639:     test:
1640:       suffix: 6_tet
1641:       requires: ctetgen
1642:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1643:     test:
1644:       suffix: 6_hex
1645:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1646:   testset:
1647:     nsize: {{1 2 4}}
1648:     args: -use_generator
1649:     args: -dm_plex_check_all
1650:     args: -distribute -interpolate create
1651:     test:
1652:       suffix: 6_int_tri
1653:       requires: triangle
1654:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1655:     test:
1656:       suffix: 6_int_quad
1657:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1658:     test:
1659:       suffix: 6_int_tet
1660:       requires: ctetgen
1661:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1662:     test:
1663:       suffix: 6_int_hex
1664:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1665:   testset:
1666:     nsize: {{2 4}}
1667:     args: -use_generator
1668:     args: -dm_plex_check_all
1669:     args: -distribute -interpolate after_distribute
1670:     test:
1671:       suffix: 6_parint_tri
1672:       requires: triangle
1673:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1674:     test:
1675:       suffix: 6_parint_quad
1676:       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1677:     test:
1678:       suffix: 6_parint_tet
1679:       requires: ctetgen
1680:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1681:     test:
1682:       suffix: 6_parint_hex
1683:       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0

1685:   testset: # 7 EXODUS
1686:     requires: exodusii
1687:     args: -dm_plex_check_all
1688:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1689:     args: -distribute
1690:     test: # seq load, simple partitioner
1691:       suffix: 7_exo
1692:       nsize: {{1 2 4 5}}
1693:       args: -interpolate none
1694:     test: # seq load, seq interpolation, simple partitioner
1695:       suffix: 7_exo_int_simple
1696:       nsize: {{1 2 4 5}}
1697:       args: -interpolate create
1698:     test: # seq load, seq interpolation, metis partitioner
1699:       suffix: 7_exo_int_metis
1700:       requires: parmetis
1701:       nsize: {{2 4 5}}
1702:       args: -interpolate create
1703:       args: -petscpartitioner_type parmetis
1704:     test: # seq load, simple partitioner, par interpolation
1705:       suffix: 7_exo_simple_int
1706:       nsize: {{2 4 5}}
1707:       args: -interpolate after_distribute
1708:     test: # seq load, metis partitioner, par interpolation
1709:       suffix: 7_exo_metis_int
1710:       requires: parmetis
1711:       nsize: {{2 4 5}}
1712:       args: -interpolate after_distribute
1713:       args: -petscpartitioner_type parmetis

1715:   testset: # 7 HDF5 SEQUENTIAL LOAD
1716:     requires: hdf5 !complex
1717:     args: -dm_plex_check_all
1718:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1719:     args: -dm_plex_hdf5_force_sequential
1720:     args: -distribute
1721:     test: # seq load, simple partitioner
1722:       suffix: 7_seq_hdf5_simple
1723:       nsize: {{1 2 4 5}}
1724:       args: -interpolate none
1725:     test: # seq load, seq interpolation, simple partitioner
1726:       suffix: 7_seq_hdf5_int_simple
1727:       nsize: {{1 2 4 5}}
1728:       args: -interpolate after_create
1729:     test: # seq load, seq interpolation, metis partitioner
1730:       nsize: {{2 4 5}}
1731:       suffix: 7_seq_hdf5_int_metis
1732:       requires: parmetis
1733:       args: -interpolate after_create
1734:       args: -petscpartitioner_type parmetis
1735:     test: # seq load, simple partitioner, par interpolation
1736:       suffix: 7_seq_hdf5_simple_int
1737:       nsize: {{2 4 5}}
1738:       args: -interpolate after_distribute
1739:     test: # seq load, metis partitioner, par interpolation
1740:       nsize: {{2 4 5}}
1741:       suffix: 7_seq_hdf5_metis_int
1742:       requires: parmetis
1743:       args: -interpolate after_distribute
1744:       args: -petscpartitioner_type parmetis

1746:   testset: # 7 HDF5 PARALLEL LOAD
1747:     requires: hdf5 !complex
1748:     nsize: {{2 4 5}}
1749:     args: -dm_plex_check_all
1750:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1751:     test: # par load
1752:       suffix: 7_par_hdf5
1753:       args: -interpolate none
1754:     test: # par load, par interpolation
1755:       suffix: 7_par_hdf5_int
1756:       args: -interpolate after_create
1757:     test: # par load, parmetis repartitioner
1758:       TODO: Parallel partitioning of uninterpolated meshes not supported
1759:       suffix: 7_par_hdf5_parmetis
1760:       requires: parmetis
1761:       args: -distribute -petscpartitioner_type parmetis
1762:       args: -interpolate none
1763:     test: # par load, par interpolation, parmetis repartitioner
1764:       suffix: 7_par_hdf5_int_parmetis
1765:       requires: parmetis
1766:       args: -distribute -petscpartitioner_type parmetis
1767:       args: -interpolate after_create
1768:     test: # par load, parmetis partitioner, par interpolation
1769:       TODO: Parallel partitioning of uninterpolated meshes not supported
1770:       suffix: 7_par_hdf5_parmetis_int
1771:       requires: parmetis
1772:       args: -distribute -petscpartitioner_type parmetis
1773:       args: -interpolate after_distribute

1775:     test:
1776:       suffix: 7_hdf5_hierarch
1777:       requires: hdf5 ptscotch !complex
1778:       nsize: {{2 3 4}separate output}
1779:       args: -distribute
1780:       args: -interpolate after_create
1781:       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1782:       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1783:       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch

1785:   test:
1786:     suffix: 8
1787:     requires: hdf5 !complex
1788:     nsize: 4
1789:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1790:     args: -distribute 0 -interpolate after_create
1791:     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1792:     args: -dm_plex_check_all
1793:     args: -custom_view

1795:   testset: # 9 HDF5 SEQUENTIAL LOAD
1796:     requires: hdf5 !complex datafilespath
1797:     args: -dm_plex_check_all
1798:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1799:     args: -dm_plex_hdf5_force_sequential
1800:     args: -distribute
1801:     test: # seq load, simple partitioner
1802:       suffix: 9_seq_hdf5_simple
1803:       nsize: {{1 2 4 5}}
1804:       args: -interpolate none
1805:     test: # seq load, seq interpolation, simple partitioner
1806:       suffix: 9_seq_hdf5_int_simple
1807:       nsize: {{1 2 4 5}}
1808:       args: -interpolate after_create
1809:     test: # seq load, seq interpolation, metis partitioner
1810:       nsize: {{2 4 5}}
1811:       suffix: 9_seq_hdf5_int_metis
1812:       requires: parmetis
1813:       args: -interpolate after_create
1814:       args: -petscpartitioner_type parmetis
1815:     test: # seq load, simple partitioner, par interpolation
1816:       suffix: 9_seq_hdf5_simple_int
1817:       nsize: {{2 4 5}}
1818:       args: -interpolate after_distribute
1819:     test: # seq load, simple partitioner, par interpolation
1820:       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1821:       # Once 9_seq_hdf5_simple_int gets fixed, this one will break.
1822:       # We can then provide an intentionally broken mesh instead.
1823:       TODO: This test is broken because PointSF is fixed.
1824:       suffix: 9_seq_hdf5_simple_int_err
1825:       nsize: 4
1826:       args: -interpolate after_distribute
1827:       filter: sed -e "/PETSC ERROR/,$$d"
1828:     test: # seq load, metis partitioner, par interpolation
1829:       nsize: {{2 4 5}}
1830:       suffix: 9_seq_hdf5_metis_int
1831:       requires: parmetis
1832:       args: -interpolate after_distribute
1833:       args: -petscpartitioner_type parmetis

1835:   testset: # 9 HDF5 PARALLEL LOAD
1836:     requires: hdf5 !complex datafilespath
1837:     nsize: {{2 4 5}}
1838:     args: -dm_plex_check_all
1839:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1840:     test: # par load
1841:       suffix: 9_par_hdf5
1842:       args: -interpolate none
1843:     test: # par load, par interpolation
1844:       suffix: 9_par_hdf5_int
1845:       args: -interpolate after_create
1846:     test: # par load, parmetis repartitioner
1847:       TODO: Parallel partitioning of uninterpolated meshes not supported
1848:       suffix: 9_par_hdf5_parmetis
1849:       requires: parmetis
1850:       args: -distribute -petscpartitioner_type parmetis
1851:       args: -interpolate none
1852:     test: # par load, par interpolation, parmetis repartitioner
1853:       suffix: 9_par_hdf5_int_parmetis
1854:       requires: parmetis
1855:       args: -distribute -petscpartitioner_type parmetis
1856:       args: -interpolate after_create
1857:     test: # par load, parmetis partitioner, par interpolation
1858:       TODO: Parallel partitioning of uninterpolated meshes not supported
1859:       suffix: 9_par_hdf5_parmetis_int
1860:       requires: parmetis
1861:       args: -distribute -petscpartitioner_type parmetis
1862:       args: -interpolate after_distribute

1864:   testset: # 10 HDF5 PARALLEL LOAD
1865:     requires: hdf5 !complex datafilespath
1866:     nsize: {{2 4 7}}
1867:     args: -dm_plex_check_all
1868:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1869:     test: # par load, par interpolation
1870:       suffix: 10_par_hdf5_int
1871:       args: -interpolate after_create
1872: TEST*/