Actual source code: ex1.c

static const char help[] = "Test initialization and migration with swarm.\n";

#include <petscdmplex.h>
#include <petscdmswarm.h>

typedef struct {
  PetscReal L[3]; /* Dimensions of the mesh bounding box */
  PetscBool setClosurePermutation;
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  PetscOptionsBegin(comm, "", "Swarm configuration options", "DMSWARM");
  options->setClosurePermutation = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-set_closure_permutation", "Set the closure permutation to tensor ordering", NULL, options->setClosurePermutation, &options->setClosurePermutation, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
{
  PetscReal low[3], high[3];
  PetscInt  cdim, d;

  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  if (user->setClosurePermutation) {
    DM cdm;

    // -- Set tensor permutation
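    //    Both the mesh DM and its coordinate DM are permuted so that cell closures
    //    and coordinate closures use the same tensor ordering.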
    PetscCall(DMGetCoordinateDM(*dm, &cdm));
    PetscCall(DMPlexSetClosurePermutationTensor(*dm, PETSC_DETERMINE, NULL));
    PetscCall(DMPlexSetClosurePermutationTensor(cdm, PETSC_DETERMINE, NULL));
  }
  PetscCall(DMGetCoordinateDim(*dm, &cdim));
  PetscCall(DMGetBoundingBox(*dm, low, high));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "dim: %" PetscInt_FMT "\n", cdim));
  for (d = 0; d < cdim; ++d) user->L[d] = high[d] - low[d];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  This function initializes all particles on rank 0.
  They are then sent to other ranks to test migration across non-nearest neighbors.
*/
static PetscErrorCode CreateSwarm(DM dm, DM *sw, AppCtx *user)
{
  PetscInt    particleInitSize = 10;
  PetscReal  *coords, upper[3], lower[3];
  PetscInt   *cellid, Np, dim;
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBegin;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetBoundingBox(dm, lower, upper));
  PetscCall(DMCreate(PETSC_COMM_WORLD, sw));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMSetType(*sw, DMSWARM));
  PetscCall(DMSetDimension(*sw, dim));
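  /* Configure a particle-in-cell swarm attached to the mesh DM; this provides the
     built-in coordinate and cell id fields accessed below by name */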
  PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC));
  PetscCall(DMSwarmSetCellDM(*sw, dm));
  PetscCall(DMSwarmFinalizeFieldRegister(*sw));
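  /* Start with every particle on rank 0; the checks below migrate them to the ranks that own them */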
  PetscCall(DMSwarmSetLocalSizes(*sw, rank == 0 ? particleInitSize : 0, 0));
  PetscCall(DMSetFromOptions(*sw));
  PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
  PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
  PetscCall(DMSwarmGetLocalSize(*sw, &Np));
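  /* Place each particle at the center of the bounding box, then spread the particles
     evenly along the y direction */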
  for (PetscInt p = 0; p < Np; ++p) {
    for (PetscInt d = 0; d < dim; ++d) { coords[p * dim + d] = lower[d] + 0.5 * (upper[d] - lower[d]); }
    coords[p * dim + 1] = (upper[1] - lower[1]) / particleInitSize * p + lower[1];
    cellid[p]           = 0;
  }
  PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
  PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
  PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  The swarm is configured with all particles on rank 0;
  migrate them to the ranks where they belong and verify
  that none are lost, duplicated, or corrupted.
*/
static PetscErrorCode CheckMigrate(DM sw)
{
  Vec       preMigrate, postMigrate, tmp;
  PetscInt  preSize, postSize;
  PetscReal prenorm, postnorm;

  PetscFunctionBeginUser;
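  /* Snapshot the global particle coordinates before migration */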
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &preMigrate));
  PetscCall(VecCopy(tmp, preMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
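  /* Migrate with remove_sent_points = PETSC_TRUE so each particle ends up only on the rank that owns it */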
  PetscCall(DMSwarmMigrate(sw, PETSC_TRUE));
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &postMigrate));
  PetscCall(VecCopy(tmp, postMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecGetSize(preMigrate, &preSize));
  PetscCall(VecGetSize(postMigrate, &postSize));
  PetscCheck(preSize == postSize, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "Particles either lost or duplicated. Pre-migration global size %" PetscInt_FMT " != post-migration global size %" PetscInt_FMT, preSize, postSize);
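  /* Migration only redistributes coordinate entries across ranks, so the global 2-norm of the
     coordinate vector must be unchanged */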
  PetscCall(VecNorm(preMigrate, NORM_2, &prenorm));
  PetscCall(VecNorm(postMigrate, NORM_2, &postnorm));
  PetscCheck(PetscAbsReal(prenorm - postnorm) < 100. * PETSC_MACHINE_EPSILON, PETSC_COMM_SELF, PETSC_ERR_COR, "Particle coordinates corrupted in migrate with abs(norm(pre) - norm(post)) = %.16g", PetscAbsReal(prenorm - postnorm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Migrate check passes.\n"));
  PetscCall(VecDestroy(&preMigrate));
  PetscCall(VecDestroy(&postMigrate));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Check that newly added points persist across migration
*/
static PetscErrorCode CheckPointInsertion(DM sw)
{
  PetscInt    Np_pre, Np_post;
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBeginUser;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscPrintf(comm, "Basic point insertion check...\n"));
  PetscCall(DMSwarmGetSize(sw, &Np_pre));
  if (rank == 0) PetscCall(DMSwarmAddPoint(sw));
  PetscCall(DMSwarmGetSize(sw, &Np_post));
  PetscCheck(Np_post == (Np_pre + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Particle insertion failed. Global size pre insertion: %" PetscInt_FMT " global size post insertion: %" PetscInt_FMT, Np_pre, Np_post);
  PetscCall(CheckMigrate(sw));
  PetscCall(PetscPrintf(comm, "Basic point insertion check passes.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Check that tie-breaking works properly when a particle
  lies on a boundary shared by two ranks. The higher rank should
  receive the particle while the lower rank deletes its copy.

  TODO: Currently only works for 2 procs.
*/
static PetscErrorCode CheckPointInsertion_Boundary(DM sw)
{
  PetscInt    Np_loc_pre, Np_loc_post, dim;
  PetscMPIInt rank, size;
  PetscReal   lbox_low[3], lbox_high[3], gbox_low[3], gbox_high[3];
  MPI_Comm    comm;
  DM          cdm;

  PetscFunctionBeginUser;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscPrintf(comm, "Rank boundary point insertion check...\n"));
  PetscCall(DMSwarmGetCellDM(sw, &cdm));
  PetscCall(DMGetDimension(cdm, &dim));
  PetscCall(DMGetBoundingBox(cdm, gbox_low, gbox_high));
  if (rank == 0) {
    PetscReal *coords;
    PetscInt   adjacentdim = 0, Np;

    PetscCall(DMGetLocalBoundingBox(cdm, lbox_low, lbox_high));
    // find a dimension whose local upper face does not reach the global boundary, i.e. a face shared with the neighboring rank
    for (PetscInt d = 0; d < dim; ++d) {
      if ((gbox_high[d] - lbox_high[d]) != 0.) adjacentdim = d;
    }
    PetscCall(DMSwarmAddPoint(sw));
    PetscCall(DMSwarmGetLocalSize(sw, &Np));
    PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
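    /* Center the new particle in the local box, then snap it onto the face shared with the neighboring rank */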
    for (PetscInt d = 0; d < dim; ++d) coords[(Np - 1) * dim + d] = lbox_low[d] + 0.5 * (lbox_high[d] - lbox_low[d]);
    coords[(Np - 1) * dim + adjacentdim] = lbox_high[adjacentdim];
    PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
  }
  PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_pre));
  PetscCall(CheckMigrate(sw));
  PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_post));
  if (rank == 0) PetscCheck(Np_loc_pre == (Np_loc_post + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not sent.", rank);
  if (rank == 1) PetscCheck(Np_loc_pre == (Np_loc_post - 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not received.", rank);
  PetscCall(PetscPrintf(comm, "Rank boundary point insertion check passes.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM       dm, sw;
  MPI_Comm comm;
  AppCtx   user;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCall(ProcessOptions(comm, &user));
  PetscCall(CreateMesh(comm, &dm, &user));
  PetscCall(CreateSwarm(dm, &sw, &user));
  PetscCall(CheckMigrate(sw));
  PetscCall(CheckPointInsertion(sw));
  PetscCall(CheckPointInsertion_Boundary(sw));
  PetscCall(DMDestroy(&sw));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  # Swarm does not handle complex or quad precision
  build:
    requires: !complex double
  # swarm_migrate_hash and swarm_migrate_scan test swarm migration with both point location types
  # on a distributed mesh where ranks overlap by 1. Points on the shared boundary should
  # be sent to the highest-ranked process that owns that portion of the domain.
  test:
    suffix: swarm_migrate_hash
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location true
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_hash_tensor_permutation
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location true -set_closure_permutation
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_scan
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location false
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_scan_tensor_permutation
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location false -set_closure_permutation
    filter: grep -v marker | grep -v atomic | grep -v usage
TEST*/