Actual source code: ex1.c

  1: static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";

  3: #include <petscdmplex.h>
  4: #include <petscdmplextransform.h>
  5: #include <petscsf.h>

  7: enum {
  8:   STAGE_LOAD,
  9:   STAGE_DISTRIBUTE,
 10:   STAGE_REFINE,
 11:   STAGE_OVERLAP
 12: };

 14: typedef struct {
 15:   PetscLogEvent createMeshEvent;
 16:   PetscLogStage stages[4];
 17:   /* Domain and mesh definition */
 18:   PetscInt  dim;     /* The topological mesh dimension */
 19:   PetscInt  overlap; /* The cell overlap to use during partitioning */
 20:   PetscBool testp4est[2];
 21:   PetscBool redistribute;
 22:   PetscBool final_ref;         /* Run refinement at the end */
 23:   PetscBool final_diagnostics; /* Run diagnostics on the final mesh */
 24: } AppCtx;
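     /* The fields above are filled from the options database by ProcessOptions() below:
        -dim, -overlap, -test_p4est_seq, -test_p4est_par, -test_redistribute, -final_ref,
        and -final_diagnostics map one-to-one onto the corresponding AppCtx members. */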

 26: PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 27: {
 28:   PetscFunctionBegin;
 29:   options->dim               = 2;
 30:   options->overlap           = 0;
 31:   options->testp4est[0]      = PETSC_FALSE;
 32:   options->testp4est[1]      = PETSC_FALSE;
 33:   options->redistribute      = PETSC_FALSE;
 34:   options->final_ref         = PETSC_FALSE;
 35:   options->final_diagnostics = PETSC_TRUE;

 37:   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
 38:   PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL, 1, 3));
 39:   PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL, 0));
 40:   PetscCall(PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL));
 41:   PetscCall(PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL));
 42:   PetscCall(PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL));
 43:   PetscCall(PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL));
 44:   PetscCall(PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL));
 45:   PetscOptionsEnd();

 47:   PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
 48:   PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
 49:   PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
 50:   PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
 51:   PetscCall(PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]));
 52:   PetscFunctionReturn(PETSC_SUCCESS);
 53: }
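
     /* Illustrative invocation only (not one of the recorded tests at the bottom of this
        file), combining the options registered above with the "dist_"/"ref_" prefixed
        stages used in CreateMesh() below:

          mpiexec -n 2 ./ex1 -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type simple -ref_dm_refine 1 -overlap 1 -final_ref -dm_view */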

 55: PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
 56: {
 57:   PetscInt    dim           = user->dim;
 58:   PetscBool   testp4est_seq = user->testp4est[0];
 59:   PetscBool   testp4est_par = user->testp4est[1];
 60:   PetscMPIInt rank, size;

 62:   PetscFunctionBegin;
 63:   PetscCall(PetscLogEventBegin(user->createMeshEvent, 0, 0, 0, 0));
 64:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
 65:   PetscCallMPI(MPI_Comm_size(comm, &size));
 66:   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
 67:   PetscCall(DMCreate(comm, dm));
 68:   PetscCall(DMSetType(*dm, DMPLEX));
 69:   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
 70:   PetscCall(DMSetFromOptions(*dm));
 71:   PetscCall(DMLocalizeCoordinates(*dm));

 73:   PetscCall(DMViewFromOptions(*dm, NULL, "-init_dm_view"));
 74:   PetscCall(DMGetDimension(*dm, &dim));

 76:   if (testp4est_seq) {
 77:     PetscCheck(PetscDefined(HAVE_P4EST), PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-p4est");
 78:     DM dmConv = NULL;

 80:     PetscCall(DMPlexCheck(*dm));
 81:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
 82:     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
 83:     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
 84:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
 85:     if (dmConv) {
 86:       PetscCall(DMDestroy(dm));
 87:       *dm = dmConv;
 88:     }
 89:     PetscCall(DMViewFromOptions(*dm, NULL, "-initref_dm_view"));
 90:     PetscCall(DMPlexCheck(*dm));

 92:     /* For topologically periodic meshes, we first localize the coordinates
 93:        and then remove any information related to the automatic computation
 94:        of localized vertices. This way, refinement operations and
 95:        conversions to p4est will preserve the shape of the domain
 96:        in physical space */
 97:     PetscCall(DMSetPeriodicity(*dm, NULL, NULL, NULL));

 99:     PetscCall(DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv));
100:     if (dmConv) {
101:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_seq_1_"));
102:       PetscCall(DMSetFromOptions(dmConv));
103:       PetscCall(DMDestroy(dm));
104:       *dm = dmConv;
105:     }
106:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_seq_1_"));
107:     PetscCall(DMSetUp(*dm));
108:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
109:     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
110:     if (dmConv) {
111:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_seq_2_"));
112:       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
113:       PetscCall(DMSetFromOptions(dmConv));
114:       PetscCall(DMDestroy(dm));
115:       *dm = dmConv;
116:     }
117:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_seq_2_"));
118:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
119:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
120:   }

122:   PetscCall(PetscLogStagePop());
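       /* Distribution stage: the "dist_" prefix is attached temporarily below, so options
          such as -dist_dm_distribute and -dist_partition_view (used by the tests at the
          bottom of this file) take effect only in this DMSetFromOptions() call */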
123:   if (!testp4est_seq) {
124:     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
125:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view"));
126:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "dist_"));
127:     PetscCall(DMSetFromOptions(*dm));
128:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
129:     PetscCall(PetscLogStagePop());
130:     PetscCall(DMViewFromOptions(*dm, NULL, "-distributed_dm_view"));
131:   }
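       /* Refinement stage: the "ref_" prefix is attached temporarily, so options such as
          -ref_dm_refine and -ref_dm_plex_transform_type act only on this DMSetFromOptions() call */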
132:   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
133:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "ref_"));
134:   PetscCall(DMSetFromOptions(*dm));
135:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
136:   PetscCall(PetscLogStagePop());

138:   if (testp4est_par) {
139:     PetscCheck(PetscDefined(HAVE_P4EST), PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-p4est");
140:     DM dmConv = NULL;

142:     PetscCall(DMPlexCheck(*dm));
143:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
144:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
145:     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
146:     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
147:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
148:     if (dmConv) {
149:       PetscCall(DMDestroy(dm));
150:       *dm = dmConv;
151:     }
152:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
153:     PetscCall(DMPlexCheck(*dm));

155:     PetscCall(DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv));
156:     if (dmConv) {
157:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_par_1_"));
158:       PetscCall(DMSetFromOptions(dmConv));
159:       PetscCall(DMDestroy(dm));
160:       *dm = dmConv;
161:     }
162:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_par_1_"));
163:     PetscCall(DMSetUp(*dm));
164:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
165:     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
166:     if (dmConv) {
167:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_par_2_"));
168:       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
169:       PetscCall(DMSetFromOptions(dmConv));
170:       PetscCall(DMDestroy(dm));
171:       *dm = dmConv;
172:     }
173:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_par_2_"));
174:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
175:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
176:   }

178:   /* test redistribution of an already distributed mesh */
179:   if (user->redistribute) {
180:     DM       distributedMesh;
181:     PetscSF  sf;
182:     PetscInt nranks;

184:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view"));
185:     PetscCall(DMPlexDistribute(*dm, 0, NULL, &distributedMesh));
186:     if (distributedMesh) {
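           /* Report the minimum over all ranks of the number of point-SF neighbors as a
              cheap consistency check of the new distribution */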
187:       PetscCall(DMGetPointSF(distributedMesh, &sf));
188:       PetscCall(PetscSFSetUp(sf));
189:       PetscCall(DMGetNeighbors(distributedMesh, &nranks, NULL));
190:       PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm)));
191:       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %" PetscInt_FMT "\n", nranks));
192:       PetscCall(DMDestroy(dm));
193:       *dm = distributedMesh;
194:     }
195:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_redist_view"));
196:   }

198:   if (user->overlap) {
199:     DM overlapMesh = NULL;

201:     /* Add the overlap to refined mesh */
202:     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
203:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view"));
204:     PetscCall(DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh));
205:     if (overlapMesh) {
206:       PetscInt overlap;
207:       PetscCall(DMPlexGetOverlap(overlapMesh, &overlap));
208:       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %" PetscInt_FMT "\n", overlap));
209:       PetscCall(DMDestroy(dm));
210:       *dm = overlapMesh;
211:     }
212:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view"));
213:     PetscCall(PetscLogStagePop());
214:   }
215:   if (user->final_ref) {
216:     DM refinedMesh = NULL;

218:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
219:     PetscCall(DMRefine(*dm, comm, &refinedMesh));
220:     if (refinedMesh) {
221:       PetscCall(DMDestroy(dm));
222:       *dm = refinedMesh;
223:     }
224:   }

226:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Generated Mesh"));
227:   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
228:   if (user->final_diagnostics) PetscCall(DMPlexCheck(*dm));
229:   PetscCall(PetscLogEventEnd(user->createMeshEvent, 0, 0, 0, 0));
230:   PetscFunctionReturn(PETSC_SUCCESS);
231: }

233: int main(int argc, char **argv)
234: {
235:   DM     dm;
236:   AppCtx user;

238:   PetscFunctionBeginUser;
239:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
240:   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
241:   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
242:   PetscCall(DMDestroy(&dm));
243:   PetscCall(PetscFinalize());
244:   return 0;
245: }

247: /*TEST
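       # Notes on the entries below: "suffix" selects the reference output file
       # output/ex1_<suffix>.out, "nsize" sets the number of MPI ranks, "requires" gates a
       # test on external packages or build configuration, and a "testset" shares its args
       # among the nested tests.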

249:   # CTetGen 0-1
250:   test:
251:     suffix: 0
252:     requires: ctetgen
253:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
254:   test:
255:     suffix: 1
256:     requires: ctetgen
257:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail -info :~sys

259:   # 2D LaTeX and ASCII output 2-9
260:   test:
261:     suffix: 2
262:     requires: triangle
263:     args: -dm_plex_interpolate 0 -dm_view ascii::ascii_latex
264:   test:
265:     suffix: 3
266:     requires: triangle
267:     args: -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
268:   test:
269:     suffix: 4
270:     requires: triangle
271:     nsize: 2
272:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
273:   test:
274:     suffix: 5
275:     requires: triangle
276:     nsize: 2
277:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
278:   test:
279:     suffix: 6
280:     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_view ascii::ascii_info_detail
281:   test:
282:     suffix: 7
283:     args: -dm_coord_space 0 -dm_plex_simplex 0 -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
284:   test:
285:     suffix: 8
286:     nsize: 2
287:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
288:   test:
289:     suffix: box_2d_latex_xper
290:     nsize: 1
291:     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,5 -dm_plex_box_bd periodic,none \
292:           -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex -dm_plex_view_edges 0

294:   # 1D ASCII output
295:   testset:
296:     args: -dm_coord_space 0 -dm_plex_dim 1 -dm_view ascii::ascii_info_detail -dm_plex_check_all
297:     test:
298:       suffix: 1d_0
299:       args:
300:     test:
301:       suffix: 1d_1
302:       args: -ref_dm_refine 2
303:     test:
304:       suffix: 1d_2
305:       args: -dm_plex_box_faces 5 -dm_plex_box_bd periodic

307:   # Parallel refinement tests with overlap
308:   test:
309:     suffix: refine_overlap_1d
310:     nsize: 2
311:     args: -dm_plex_dim 1 -dim 1 -dm_plex_box_faces 4 -ref_dm_refine 1 -overlap {{0 1 2}separate output} -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info
312:   test:
313:     suffix: refine_overlap_2d
314:     requires: triangle
315:     nsize: {{2 8}separate output}
316:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info
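       # For reference, the nsize=2, -overlap 1 instance of the test above corresponds to running by hand
       # (illustrative only; the harness expands the {{...}} values itself):
       #   mpiexec -n 2 ./ex1 -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap 1 -dm_view ascii::ascii_info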

318:   # Parallel extrusion tests
319:   test:
320:     suffix: 1d_extruded
321:     args: -dm_plex_dim 1 -dm_plex_box_faces 5 -dm_extrude 3 -dm_plex_check_all -dm_view draw

323:   test:
324:     # This test needs a non-tensor prism so we can make a coordinate space
325:     suffix: spheresurface_extruded
326:     nsize: 4
327:     args: -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_use_tensor 0 \
328:           -dist_dm_distribute -petscpartitioner_type simple \
329:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

331:   test:
332:     # This test needs a non-tensor prism so we can make a coordinate space
333:     suffix: spheresurface_extruded_symmetric
334:     nsize: 4
335:     args: -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_use_tensor 0 -dm_plex_transform_extrude_symmetric \
336:           -dist_dm_distribute -petscpartitioner_type simple \
337:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

339:   test:
340:     # Test with a tensor prism which cannot have a coordinate space
341:     suffix: spheresurface_extruded_nocoord
342:     nsize: 4
343:     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 \
344:           -dist_dm_distribute -petscpartitioner_type simple \
345:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

347:   # Parallel simple partitioner tests
348:   test:
349:     suffix: part_simple_0
350:     requires: triangle
351:     nsize: 2
352:     args: -dm_coord_space 0 -dm_plex_interpolate 0 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
353:   test:
354:     suffix: part_simple_1
355:     requires: triangle
356:     nsize: 8
357:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail

359:   # Parallel partitioner tests
360:   test:
361:     suffix: part_parmetis_0
362:     requires: parmetis
363:     nsize: 2
364:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
365:   test:
366:     suffix: part_ptscotch_0
367:     requires: ptscotch
368:     nsize: 2
369:     args: -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
370:   test:
371:     suffix: part_ptscotch_1
372:     requires: ptscotch
373:     nsize: 8
374:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1

376:   # CGNS reader tests 10-11 (need to find smaller test meshes)
377:   test:
378:     suffix: cgns_0
379:     requires: cgns
380:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -dm_view

382:   # ExodusII reader tests
383:   testset:
384:     args: -dm_plex_boundary_label boundary -dm_plex_check_all -dm_view
385:     test:
386:       suffix: exo_0
387:       requires: exodusii
388:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad.exo
389:     test:
390:       suffix: exo_1
391:       requires: exodusii
392:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad-15.exo
393:     test:
394:       suffix: exo_2
395:       requires: exodusii
396:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/squaremotor-30.exo
397:     test:
398:       suffix: exo_3
399:       requires: exodusii
400:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
401:     test:
402:       suffix: exo_4
403:       requires: exodusii
404:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/simpleblock-100.exo
405:     test:
406:       suffix: exo_1d_0
407:       requires: exodusii
408:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/1d-2elems.e

410:   # Gmsh mesh reader tests
411:   testset:
412:     args: -dm_coord_space 0 -dm_view

414:     test:
415:       suffix: gmsh_0
416:       requires: !single
417:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
418:     test:
419:       suffix: gmsh_1
420:       requires: !single
421:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh
422:     test:
423:       suffix: gmsh_2
424:       requires: !single
425:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh
426:     test:
427:       suffix: gmsh_3
428:       nsize: 3
429:       requires: !single
430:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -dist_dm_distribute -petscpartitioner_type simple
431:     test:
432:       suffix: gmsh_4
433:       nsize: 3
434:       requires: !single
435:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dist_dm_distribute -petscpartitioner_type simple
436:     test:
437:       suffix: gmsh_5
438:       requires: !single
439:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh
440:     # TODO: it seems the mesh is not a valid Gmsh mesh (inverted cell)
441:     test:
442:       suffix: gmsh_6
443:       requires: !single
444:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -final_diagnostics 0
445:     test:
446:       suffix: gmsh_7
447:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
448:     test:
449:       suffix: gmsh_8
450:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -dm_plex_check_all
451:   testset:
452:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
453:     test:
454:       suffix: gmsh_9
455:     test:
456:       suffix: gmsh_9_periodic_0
457:       args: -dm_plex_gmsh_periodic 0
458:   testset:
459:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all
460:     test:
461:       suffix: gmsh_10
462:     test:
463:       suffix: gmsh_10_periodic_0
464:       args: -dm_plex_gmsh_periodic 0
465:   testset:
466:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all -ref_dm_refine 1
467:     test:
468:       suffix: gmsh_11
469:     test:
470:       suffix: gmsh_11_periodic_0
471:       args: -dm_plex_gmsh_periodic 0
472:   # TODO: it seems the mesh is not a valid Gmsh mesh (inverted cell)
473:   test:
474:     suffix: gmsh_12
475:     nsize: 4
476:     requires: !single mpiio
477:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
478:   test:
479:     suffix: gmsh_13_hybs2t
480:     nsize: 4
481:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dist_dm_distribute -petscpartitioner_type simple -dm_view -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
482:   test:
483:     suffix: gmsh_14_ext
484:     requires: !single
485:     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all
486:   test:
487:     suffix: gmsh_14_ext_s2t
488:     requires: !single
489:     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
490:   test:
491:     suffix: gmsh_15_hyb3d
492:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all
493:   test:
494:     suffix: gmsh_15_hyb3d_vtk
495:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -dm_plex_gmsh_hybrid -dm_plex_check_all
496:   test:
497:     suffix: gmsh_15_hyb3d_s2t
498:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
499:   test:
500:     suffix: gmsh_16_spheresurface
501:     nsize: 4
502:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
503:   test:
504:     suffix: gmsh_16_spheresurface_s2t
505:     nsize: 4
506:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
507:   test:
508:     suffix: gmsh_16_spheresurface_extruded
509:     nsize: 4
510:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
511:   test:
512:     suffix: gmsh_16_spheresurface_extruded_s2t
513:     nsize: 4
514:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
515:   test:
516:     suffix: gmsh_17_hyb3d_interp_ascii
517:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -dm_plex_check_all
518:   test:
519:     suffix: exodus_17_hyb3d_interp_ascii
520:     requires: exodusii
521:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -dm_plex_check_all

523:   # Legacy Gmsh v22/v40 ascii/binary reader tests
524:   testset:
525:     output_file: output/ex1_gmsh_3d_legacy.out
526:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all
527:     test:
528:       suffix: gmsh_3d_ascii_v22
529:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
530:     test:
531:       suffix: gmsh_3d_ascii_v40
532:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
533:     test:
534:       suffix: gmsh_3d_binary_v22
535:       # Could not remake binary to remove extra face labeling
536:       output_file: output/ex1_gmsh_3d_legacy_v22_bin.out
537:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
538:     test:
539:       suffix: gmsh_3d_binary_v40
540:       requires: long64
541:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4

543:   # Gmsh v41 ascii/binary reader tests
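       # The "output_file" key lets the ascii/binary and MPI-IO variants within a testset
       # share a single reference output.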
544:   testset: # 32-bit mesh, sequential
545:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
546:     output_file: output/ex1_gmsh_3d_32.out
547:     test:
548:       suffix: gmsh_3d_ascii_v41_32
549:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
550:     test:
551:       suffix: gmsh_3d_binary_v41_32
552:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
553:     test:
554:       suffix: gmsh_3d_binary_v41_32_mpiio
555:       requires: defined(PETSC_HAVE_MPIIO)
556:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
557:   test:
558:     suffix: gmsh_quad_8node
559:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-qua-8node.msh \
560:           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
561:   test:
562:     suffix: gmsh_hex_20node
563:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-hex-20node.msh \
564:           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
565:   testset:  # 32-bit mesh, parallel
566:     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
567:     nsize: 2
568:     output_file: output/ex1_gmsh_3d_32_np2.out
569:     test:
570:       suffix: gmsh_3d_ascii_v41_32_np2
571:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
572:     test:
573:       suffix: gmsh_3d_binary_v41_32_np2
574:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
575:     test:
576:       suffix: gmsh_3d_binary_v41_32_np2_mpiio
577:       requires: defined(PETSC_HAVE_MPIIO)
578:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
579:   testset: # 64-bit mesh, sequential
580:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
581:     output_file: output/ex1_gmsh_3d_64.out
582:     test:
583:       suffix: gmsh_3d_ascii_v41_64
584:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
585:     test:
586:       suffix: gmsh_3d_binary_v41_64
587:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
588:     test:
589:       suffix: gmsh_3d_binary_v41_64_mpiio
590:       requires: defined(PETSC_HAVE_MPIIO)
591:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
592:   testset:  # 64-bit mesh, parallel
593:     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
594:     nsize: 2
595:     output_file: output/ex1_gmsh_3d_64_np2.out
596:     test:
597:       suffix: gmsh_3d_ascii_v41_64_np2
598:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
599:     test:
600:       suffix: gmsh_3d_binary_v41_64_np2
601:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
602:     test:
603:       suffix: gmsh_3d_binary_v41_64_np2_mpiio
604:       requires: defined(PETSC_HAVE_MPIIO)
605:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio

607:   # Fluent mesh reader tests
608:   # TODO: Geometry checks fail
609:   test:
610:     suffix: fluent_0
611:     requires: !complex
612:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dm_view -final_diagnostics 0
613:   test:
614:     suffix: fluent_1
615:     nsize: 3
616:     requires: !complex
617:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
618:   test:
619:     suffix: fluent_2
620:     requires: !complex
621:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -dm_view -final_diagnostics 0
622:   test:
623:     suffix: fluent_3
624:     requires: !complex
625:     TODO: Fails on non-Linux: fseek(), fileno()? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
626:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -dm_view -final_diagnostics 0

628:   # MED mesh reader tests, including parallel file reads
629:   test:
630:     suffix: med_0
631:     requires: med
632:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dm_view
633:   test:
634:     suffix: med_1
635:     requires: med
636:     nsize: 3
637:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dist_dm_distribute -petscpartitioner_type simple -dm_view
638:   test:
639:     suffix: med_2
640:     requires: med
641:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dm_view
642:   test:
643:     suffix: med_3
644:     requires: med
645:     TODO: MED
646:     nsize: 3
647:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dist_dm_distribute -petscpartitioner_type simple -dm_view

649:   # Test shape quality
650:   test:
651:     suffix: test_shape
652:     requires: ctetgen
653:     args: -dm_plex_dim 3 -dim 3 -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape

655:   # Test simplex to tensor conversion
656:   test:
657:     suffix: s2t2
658:     requires: triangle
659:     args: -dm_coord_space 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail

661:   test:
662:     suffix: s2t3
663:     requires: ctetgen
664:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail

666:   # Test cylinder
667:   testset:
668:     args: -dm_plex_shape cylinder -dm_plex_check_all -dm_view
669:     test:
670:       suffix: cylinder
671:       args: -ref_dm_refine 1
672:     test:
673:       suffix: cylinder_per
674:       args: -dm_plex_cylinder_bd periodic -ref_dm_refine 1 -ref_dm_refine_remap 0
675:     test:
676:       suffix: cylinder_wedge
677:       args: -dm_coord_space 0 -dm_plex_interpolate 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
678:     test:
679:       suffix: cylinder_wedge_int
680:       output_file: output/ex1_cylinder_wedge.out
681:       args: -dm_coord_space 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:

683:   test:
684:     suffix: box_2d
685:     args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view

687:   test:
688:     suffix: box_2d_per
689:     args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -ref_dm_refine 2 -dm_plex_check_all -dm_view

691:   test:
692:     suffix: box_2d_per_unint
693:     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_interpolate 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_plex_check_all -dm_view ::ascii_info_detail

695:   test:
696:     suffix: box_3d
697:     args: -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -ref_dm_refine 3 -dm_plex_check_all -dm_view

699:   test:
700:     requires: triangle
701:     suffix: box_wedge
702:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: -dm_plex_check_all

704:   testset:
705:     requires: triangle
706:     args: -dm_coord_space 0 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_plex_box_faces 2,3,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
707:     test:
708:       suffix: box_wedge_s2t
709:     test:
710:       nsize: 3
711:       args: -dist_dm_distribute -petscpartitioner_type simple
712:       suffix: box_wedge_s2t_parallel

714:   # Test GLVis output
715:   testset:
716:     args: -dm_coord_space 0 -dm_plex_interpolate 0
717:     test:
718:       suffix: glvis_2d_tet
719:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
720:     test:
721:       suffix: glvis_2d_tet_per
722:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
723:     test:
724:       suffix: glvis_3d_tet
725:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
726:   testset:
727:     args: -dm_coord_space 0
728:     test:
729:       suffix: glvis_2d_tet_per_mfem
730:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
731:     test:
732:       suffix: glvis_2d_quad
733:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_view glvis:
734:     test:
735:       suffix: glvis_2d_quad_per
736:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
737:     test:
738:       suffix: glvis_2d_quad_per_shift
739:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_plex_box_lower -1,-1 -dm_plex_box_upper 1,1 -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
740:     test:
741:       suffix: glvis_2d_quad_per_mfem
742:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
743:     test:
744:       suffix: glvis_3d_tet_per
745:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
746:     test:
747:       suffix: glvis_3d_tet_per_mfem
748:       TODO: broken
749:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
750:     test:
751:       suffix: glvis_3d_hex
752:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_view glvis:
753:     test:
754:       suffix: glvis_3d_hex_per
755:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
756:     test:
757:       suffix: glvis_3d_hex_per_mfem
758:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
759:     test:
760:       suffix: glvis_2d_hyb
761:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
762:     test:
763:       suffix: glvis_3d_hyb
764:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
765:     test:
766:       suffix: glvis_3d_hyb_s2t
767:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all

769:   # Test P4EST
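       # The conv_seq_1_/conv_par_1_ prefixes below configure the DM right after conversion to
       # p4est/p8est, while conv_seq_2_/conv_par_2_ configure the DM converted back to Plex;
       # see the prefix handling in CreateMesh() above.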
770:   testset:
771:     requires: p4est
772:     args: -dm_coord_space 0 -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
773:     test:
774:       suffix: p4est_periodic
775:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
776:     test:
777:       suffix: p4est_periodic_3d
778:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
779:     test:
780:       suffix: p4est_gmsh_periodic
781:       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
782:     test:
783:       suffix: p4est_gmsh_surface
784:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
785:     test:
786:       suffix: p4est_gmsh_surface_parallel
787:       nsize: 2
788:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
789:     test:
790:       suffix: p4est_hyb_2d
791:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
792:     test:
793:       suffix: p4est_hyb_3d
794:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
795:     test:
796:       requires: ctetgen
797:       suffix: p4est_s2t_bugfaces_3d
798:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1
799:     test:
800:       suffix: p4est_bug_overlapsf
801:       nsize: 3
802:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
803:     test:
804:       suffix: p4est_redistribute
805:       nsize: 3
806:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_view ::load_balance
807:     test:
808:       suffix: p4est_gmsh_s2t_3d
809:       args: -conv_seq_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
810:     test:
811:       suffix: p4est_gmsh_s2t_3d_hash
812:       args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
813:     test:
814:       requires: long_runtime
815:       suffix: p4est_gmsh_periodic_3d
816:       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

818:   testset:
819:     requires: p4est
820:     nsize: 6
821:     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0 -dist_dm_distribute
822:     test:
823:       TODO: interface cones do not conform
824:       suffix: p4est_par_periodic
825:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
826:     test:
827:       TODO: interface cones do not conform
828:       suffix: p4est_par_periodic_3d
829:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,periodic -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
830:     test:
831:       TODO: interface cones do not conform
832:       suffix: p4est_par_gmsh_periodic
833:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
834:     test:
835:       suffix: p4est_par_gmsh_surface
836:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
837:     test:
838:       suffix: p4est_par_gmsh_s2t_3d
839:       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
840:     test:
841:       TODO: interface cones do not conform
842:       suffix: p4est_par_gmsh_s2t_3d_hash
843:       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
844:     test:
845:       requires: long_runtime
846:       suffix: p4est_par_gmsh_periodic_3d
847:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

849:   testset:
850:     requires: p4est
851:     nsize: 6
852:     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -dist_dm_distribute -petscpartitioner_type simple
853:     test:
854:       suffix: p4est_par_ovl_periodic
855:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
856:     # TODO: Mesh cell 201 is inverted, vol = 0 (FVM volume; is it correct? -> diagnostics disabled)
857:     test:
858:       suffix: p4est_par_ovl_periodic_3d
859:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
860:     test:
861:       suffix: p4est_par_ovl_gmsh_periodic
862:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
863:     test:
864:       suffix: p4est_par_ovl_gmsh_surface
865:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
866:     test:
867:       suffix: p4est_par_ovl_gmsh_s2t_3d
868:       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
869:     test:
870:       suffix: p4est_par_ovl_gmsh_s2t_3d_hash
871:       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
872:     test:
873:       requires: long_runtime
874:       suffix: p4est_par_ovl_gmsh_periodic_3d
875:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
876:     test:
877:       suffix: p4est_par_ovl_hyb_2d
878:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
879:     test:
880:       suffix: p4est_par_ovl_hyb_3d
881:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh

883:   test:
884:     TODO: broken
885:     requires: p4est
886:     nsize: 2
887:     suffix: p4est_bug_labels_noovl
888:     args: -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -dist_dm_distribute -petscpartitioner_type simple -dm_forest_print_label_error

890:   test:
891:     requires: p4est
892:     nsize: 2
893:     suffix: p4est_bug_distribute_overlap
894:     args: -dm_coord_space 0 -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
895:     args: -dm_post_overlap_view

897:   test:
898:     suffix: ref_alfeld2d_0
899:     requires: triangle
900:     args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
901:   test:
902:     suffix: ref_alfeld3d_0
903:     requires: ctetgen
904:     args: -dm_plex_dim 3 -dm_plex_box_faces 5,1,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics

906:   # Boundary layer refiners
907:   test:
908:     suffix: ref_bl_1
909:     args: -dm_plex_dim 1 -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 2 -final_diagnostics -ref_dm_plex_transform_bl_splits 3
910:   test:
911:     suffix: ref_bl_2_tri
912:     requires: triangle
913:     args: -dm_coord_space 0 -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
914:   test:
915:     suffix: ref_bl_3_quad
916:     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
917:   test:
918:     suffix: ref_bl_spheresurface_extruded
919:     nsize: 4
920:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 2
921:   test:
922:     suffix: ref_bl_3d_hyb
923:     nsize: 4
924:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 4 -ref_dm_plex_transform_bl_height_factor 3.1

926:   testset:
927:     args: -dm_plex_shape sphere -dm_plex_check_all -dm_view
928:     test:
929:       suffix: sphere_0
930:       args:
931:     test:
932:       suffix: sphere_1
933:       args: -ref_dm_refine 2
934:     test:
935:       suffix: sphere_2
936:       args: -dm_plex_simplex 0
937:     test:
938:       suffix: sphere_3
939:       args: -dm_plex_simplex 0 -ref_dm_refine 2

941:   test:
942:     suffix: ball_0
943:     requires: ctetgen
944:     args: -dm_plex_dim 3 -dm_plex_shape ball -dm_plex_check_all -dm_view

946:   test:
947:     suffix: ball_1
948:     requires: ctetgen
949:     args: -dm_plex_dim 3 -dm_plex_shape ball -bd_dm_refine 2 -dm_plex_check_all -dm_view

951:   test:
952:     suffix: schwarz_p_extrude
953:     args: -dm_plex_shape schwarz_p -dm_plex_tps_extent 1,1,1 -dm_plex_tps_layers 1 -dm_plex_tps_thickness .2 -dm_view

955:   test:
956:     suffix: pyr_mixed_0
957:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/pyr_tet.msh -dm_plex_check_all -dm_view

959:   test:
960:     suffix: hypercubic_0
961:     args: -dm_plex_dim 2 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view

963:   test:
964:     suffix: hypercubic_1
965:     args: -dm_plex_dim 3 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3 -dm_plex_check_all -dm_view

967:   test:
968:     suffix: hypercubic_2
969:     args: -dm_plex_dim 4 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3 -dm_view \
970:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0

972:   test:
973:     suffix: hypercubic_3
974:     args: -dm_plex_dim 5 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3,3 -dm_view \
975:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0

977:   test:
978:     suffix: hypercubic_4
979:     args: -dm_plex_dim 6 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3,3,3 -dm_view \
980:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0
981: TEST*/