Actual source code: ex3.c

static char help[] = "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";
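
/* PetscSFFetchAndOp semantics, for reference while reading the value comments below: for
   every leaf, fetch the current root value into leafupdate, then combine leafdata into
   that root with op, atomically with respect to the other leaves of the same root. The
   value comments trace op = MPI_SUM unless stated otherwise. */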

#include <petscvec.h>
#include <petscsf.h>
int main(int argc, char **argv)
{
  PetscInt     i, N = 10, low, high, nleaves;
  PetscMPIInt  size, rank;
  Vec          x, y, y2, gy2;
  PetscScalar *rootdata, *leafdata, *leafupdate;
  PetscLayout  layout;
  PetscSF      gathersf, allgathersf, alltoallsf;
  MPI_Op       op = MPI_SUM;
  char         opname[64] = "sum"; /* default op when -op is not given on the command line */
  const char  *mpiopname;
  PetscBool    flag, isreplace, issum;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscOptionsGetString(NULL, NULL, "-op", opname, sizeof(opname), &flag));
  PetscCall(PetscStrcmp(opname, "replace", &isreplace));
  PetscCall(PetscStrcmp(opname, "sum", &issum));

  if (isreplace) {
    op        = MPI_REPLACE;
    mpiopname = "MPI_REPLACE";
  } else if (issum) {
    op        = MPIU_SUM;
    mpiopname = "MPI_SUM";
  } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Unsupported argument (%s) to -op, which must be 'replace' or 'sum'", opname);

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the gather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &gathersf));
  PetscCall(PetscSFSetGraphWithPattern(gathersf, layout, PETSCSF_PATTERN_GATHER));
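  /* With PETSCSF_PATTERN_GATHER, the N roots are laid out across ranks per `layout` and all
     leaves live on rank 0, so the nleaves queried below is N on rank 0 and 0 elsewhere */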

  /* Create the leaf vector y (a seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(gathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafupdate = rootdata = [1,N]; with op = sum, rootdata += leafdata gives [3,3*N] */
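  /* With op = replace, leafupdate is still the old rootdata = [1,N], while rootdata is
     overwritten by leafdata and becomes [2,2*N] */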
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  /* PetscCall(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  PetscCall(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* Reset the MPI vec x to [1, 2, .., N], since the gather test above modified rootdata */
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the allgather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &allgathersf));
  PetscCall(PetscSFSetGraphWithPattern(allgathersf, layout, PETSCSF_PATTERN_ALLGATHER));
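  /* With PETSCSF_PATTERN_ALLGATHER, every rank has N leaves (one per root), so each root
     now has `size` leaves and the y built below is a full local copy of x */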

  /* Create the leaf vector y (a seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(allgathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, with op = sum we have
     (supposing ranks get their updates in ascending order)
     rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
     rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
     rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
     ...
   */
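  /* With op = replace, every rank replaces the roots with leafdata = [2,2*N], so rootdata ends
     as [2,2*N]; leafupdate is [1,N] on whichever rank fetches first and [2,2*N] on the others
     (subject to the same ordering caveat as above) */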
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x)); /* We won't reuse this x in ALLTOALL (it creates its own), so destroy it */

  PetscCall(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, size, PETSC_DECIDE));

  /* set MPI vec x to [1, 2, .., size^2] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n", mpiopname));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &alltoallsf));
  PetscCall(PetscSFSetGraphWithPattern(alltoallsf, NULL /* layout is ignored for this pattern */, PETSCSF_PATTERN_ALLTOALL));
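  /* With PETSCSF_PATTERN_ALLTOALL, every rank has `size` roots and `size` leaves; root i on
     rank r is connected to leaf r on rank i, so each root has exactly one leaf */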

  /* Create the leaf vector y (a seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(alltoallsf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
  PetscCall(PetscSFBcastBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
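  /* Why: leaf i here receives root `rank` of rank i, whose global index is i*size+rank and
     hence whose value is 1+rank+size*i; y is thus the transpose of the rootdata layout */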

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has only one leaf.
     So, leafupdate = rootdata = 1+rank+size*i, i=0..size-1; and with op = sum, rootdata += leafdata
     doubles every root, i.e., rootdata = [2,2*size^2]
   */
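  /* With op = replace, nothing visibly changes: leafdata equals the matching rootdata, so
     replacing a root with its leaf value leaves rootdata as is, and leafupdate = old rootdata */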
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscSFDestroy(&alltoallsf));

  PetscCall(PetscFinalize());
  return 0;
}
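
/* A manual run might look like this (assuming a standard PETSc makefile that builds ex3):
     make ex3
     mpiexec -n 2 ./ex3 -op sum
   The harness cases below cover both ops, with rank counts that hit both the divisible and
   non-divisible code paths. */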

/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300 (I_MPI_NUMVERSION)
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300 (I_MPI_NUMVERSION)
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/