Actual source code: ex12.c

static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial and distribute it on 24 processes using a custom partitioner:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, redistribute it on 24 processes with a second partitioner, and write the meshes to VTK:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

*/
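
/* Outside the test harness, equivalent runs can be launched directly with mpiexec, for example (assuming the executable has been built as ./ex12 in this directory):

     mpiexec -n 24 ./ex12 -filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view
*/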

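/* Logging stages registered in ProcessOptions() to separate mesh load, distribution, refinement, and redistribution timings */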
enum {
  STAGE_LOAD,
  STAGE_DISTRIBUTE,
  STAGE_REFINE,
  STAGE_REDISTRIBUTE
};

typedef struct {
  /* Domain and mesh definition */
  PetscInt      overlap;          /* The cell overlap to use during partitioning */
  PetscBool     testPartition;    /* Use a fixed partitioning for testing */
  PetscBool     testRedundant;    /* Use a redundant partitioning for testing */
  PetscBool     loadBalance;      /* Load balance via a second distribute step */
  PetscBool     partitionBalance; /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

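/* Parse the command line options into the AppCtx and register the logging stages */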
PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0));
  PetscCall(PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL));
  PetscCall(PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL));
  PetscCall(PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL));
  PetscCall(PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL));
  PetscOptionsEnd();

  PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

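/* Create the mesh from the command line options, distribute it (optionally with a fixed test partition or as a redundant copy on every rank), and optionally load balance it with a second distribution step */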
PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM          pdm             = NULL;
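  /* Fixed test partitions used with -test_partition: sizes[] gives the number of cells assigned to each rank and points[] the concatenated cell numbers, in the form expected by PetscPartitionerShellSetPartition() */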
  PetscInt    triSizes_n2[2]  = {4, 4};
  PetscInt    triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt    triSizes_n3[3]  = {3, 2, 3};
  PetscInt    triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt    triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt    triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt    triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt    triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt    quadSizes[2]    = {2, 2};
  PetscInt    quadPoints[4]   = {2, 3, 0, 1};
  PetscInt    overlap         = user->overlap >= 0 ? user->overlap : 0;
  PetscInt    dim;
  PetscBool   simplex;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
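  /* Load stage: build the Plex from the command line options; automatic distribution is disabled so that this example controls partitioning explicitly below */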
  PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
  PetscCall(PetscLogStagePop());
  PetscCall(DMGetDimension(*dm, &dim));
  PetscCall(DMPlexIsSimplex(*dm, &simplex));
  PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
  if (!user->testRedundant) {
    PetscPartitioner part;

    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    if (user->testPartition) {
      const PetscInt *sizes  = NULL;
      const PetscInt *points = NULL;

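      /* Only rank 0, which holds the serial mesh at this point, supplies the fixed partition; the other ranks pass NULL */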
      if (rank == 0) {
        if (dim == 2 && simplex && size == 2) {
          sizes  = triSizes_n2;
          points = triPoints_n2;
        } else if (dim == 2 && simplex && size == 3) {
          sizes  = triSizes_n3;
          points = triPoints_n3;
        } else if (dim == 2 && simplex && size == 4) {
          sizes  = triSizes_n4;
          points = triPoints_n4;
        } else if (dim == 2 && simplex && size == 8) {
          sizes  = triSizes_n8;
          points = triPoints_n8;
        } else if (dim == 2 && !simplex && size == 2) {
          sizes  = quadSizes;
          points = quadPoints;
        }
      }
      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points));
    }
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
  } else {
    PetscSF sf;

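    /* Redundant path: gather a full copy of the mesh onto every process; the returned SF describes the migration and is reused here to exercise DMPlexMigrate() */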
    PetscCall(DMPlexGetRedundantDM(*dm, &sf, &pdm));
    if (sf) {
      DM test;

      PetscCall(DMPlexCreate(comm, &test));
      PetscCall(PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh"));
      PetscCall(DMPlexMigrate(*dm, sf, test));
      PetscCall(DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view"));
      PetscCall(DMDestroy(&test));
    }
    PetscCall(PetscSFDestroy(&sf));
  }
  if (pdm) {
    PetscCall(DMDestroy(dm));
    *dm = pdm;
  }
  PetscCall(PetscLogStagePop());
  PetscCall(DMSetFromOptions(*dm));
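  /* Optional second distribution for load balancing: the "lb_" prefix lets the rebalancing partitioner be configured independently, e.g. with -lb_petscpartitioner_type parmetis */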
  if (user->loadBalance) {
    PetscPartitioner part;

    PetscCall(DMViewFromOptions(*dm, NULL, "-prelb_dm_view"));
    PetscCall(DMPlexSetOptionsPrefix(*dm, "lb_"));
    PetscCall(PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]));
    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, "lb_"));
    PetscCall(PetscPartitionerSetFromOptions(part));
    if (user->testPartition) {
      PetscInt reSizes_n2[2]  = {2, 2};
      PetscInt rePoints_n2[4] = {2, 3, 0, 1};
      if (rank) {
        rePoints_n2[0] = 1;
        rePoints_n2[1] = 2;
        rePoints_n2[2] = 0;
        rePoints_n2[3] = 3;
      }

      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2));
    }
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm = pdm;
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscCall(PetscLogStagePop());
  PetscFunctionReturn(PETSC_SUCCESS);
}
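
/* A minimal sketch, not exercised by this example: overlap can also be added to an
   already-distributed mesh with DMPlexDistributeOverlap(), using the same
   swap-the-DM pattern as above:

     PetscSF osf = NULL;
     DM      odm = NULL;

     PetscCall(DMPlexDistributeOverlap(*dm, 1, &osf, &odm));
     PetscCall(PetscSFDestroy(&osf));
     if (odm) {
       PetscCall(DMDestroy(dm));
       *dm = odm;
     }
*/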

int main(int argc, char **argv)
{
  DM     dm;
  AppCtx user; /* user-defined work context */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
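  # Test targets are named dm_impls_plex_tests-ex12_<suffix>; see the sample make invocations at the top of this file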
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/