Actual source code: ex6.c

static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscao.h>

int main(int argc, char **argv)
{
  PetscMPIInt     rank;
  PetscInt        M = 3, N = 5, P = 3, s = 1, w = 2, nloc, l, i, j, k, kk, m = PETSC_DECIDE, n = PETSC_DECIDE, p = PETSC_DECIDE;
  PetscInt        Xs, Xm, Ys, Ym, Zs, Zm, iloc, *iglobal;
  const PetscInt *ltog;
  PetscInt       *lx = NULL, *ly = NULL, *lz = NULL;
  PetscBool       test_order = PETSC_FALSE;
  DM              da;
  PetscViewer     viewer;
  Vec             local, global;
  PetscScalar     value;
  DMBoundaryType  bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE, bz = DM_BOUNDARY_NONE;
  DMDAStencilType stencil_type = DMDA_STENCIL_BOX;
  AO              ao;
  PetscBool       flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, "", 300, 0, 400, 300, &viewer));
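  /* The draw viewer is used by DMView() below to sketch the parallel decomposition; run with -nox to suppress graphics */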

  /* Read options */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NX", &M, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NY", &N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NZ", &P, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-p", &p, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-s", &s, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-w", &w, NULL));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-star", &flg, NULL));
  if (flg) stencil_type = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &flg, NULL));
  if (flg) stencil_type = DMDA_STENCIL_BOX;

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xperiodic", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xghosted", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xnonghosted", &flg, NULL));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yperiodic", &flg, NULL));
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yghosted", &flg, NULL));
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-ynonghosted", &flg, NULL));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-zperiodic", &flg, NULL));
  if (flg) bz = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-zghosted", &flg, NULL));
  if (flg) bz = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-znonghosted", &flg, NULL));
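  /* The -[xyz]nonghosted options are accepted but intentionally do nothing: the default DM_BOUNDARY_NONE stays in place */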

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-testorder", &test_order, NULL));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &flg, NULL));
  if (flg) {
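    /* Hand-built ownership ranges: all but the last process in each direction get a fixed slab
       (4 points in x, 2 in y and z); the last one takes the remainder, so M, N, P must be large
       enough for that remainder to stay positive */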
    PetscCheck(m != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -m option with -distribute option");
    PetscCall(PetscMalloc1(m, &lx));
    for (i = 0; i < m - 1; i++) lx[i] = 4;
    lx[m - 1] = M - 4 * (m - 1);
    PetscCheck(n != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -n option with -distribute option");
    PetscCall(PetscMalloc1(n, &ly));
    for (i = 0; i < n - 1; i++) ly[i] = 2;
    ly[n - 1] = N - 2 * (n - 1);
    PetscCheck(p != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -p option with -distribute option");
    PetscCall(PetscMalloc1(p, &lz));
    for (i = 0; i < p - 1; i++) lz[i] = 2;
    lz[p - 1] = P - 2 * (p - 1);
  }

  /* Create distributed array and get vectors */
  PetscCall(DMDACreate3d(PETSC_COMM_WORLD, bx, by, bz, stencil_type, M, N, P, m, n, p, w, s, lx, ly, lz, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
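  /* The DMDA keeps its own copy of the ownership ranges, so lx, ly, lz can be freed here */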
  PetscCall(PetscFree(lx));
  PetscCall(PetscFree(ly));
  PetscCall(PetscFree(lz));
  PetscCall(DMView(da, viewer));
  PetscCall(DMCreateGlobalVector(da, &global));
  PetscCall(DMCreateLocalVector(da, &local));

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  PetscCall(VecSet(global, value));
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
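  /* Every entry of each local vector, ghost points included, is now 1 */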

  /* Scale local vectors according to processor rank; pass to global vector */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  value = rank;
  PetscCall(VecScale(local, value));
  PetscCall(DMLocalToGlobalBegin(da, local, INSERT_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(da, local, INSERT_VALUES, global));
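  /* With INSERT_VALUES only the owned part of each local vector is copied, so every
     global entry now equals the rank of its owning process */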

  if (!test_order) { /* turn off printing when testing ordering mappings */
    if (M * N * P < 40) {
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nGlobal Vector:\n"));
      PetscCall(VecView(global, PETSC_VIEWER_STDOUT_WORLD));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n"));
    }
  }

  /* Send ghost points to local vectors again, now carrying the rank-valued data */
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-local_print", &flg, NULL));
  if (flg) {
    PetscViewer sviewer;
    PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nLocal Vector: processor %d\n", rank));
    PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(VecView(local, sviewer));
    PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Tests mappings between application/PETSc orderings */
  if (test_order) {
    ISLocalToGlobalMapping ltogm;

    PetscCall(DMGetLocalToGlobalMapping(da, &ltogm));
    PetscCall(ISLocalToGlobalMappingGetSize(ltogm, &nloc));
    PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));
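    /* ltog[iloc] is the global PETSc index of local (ghosted) index iloc; nloc counts all local dof, ghosts included */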

    PetscCall(DMDAGetGhostCorners(da, &Xs, &Ys, &Zs, &Xm, &Ym, &Zm));
    PetscCall(DMDAGetAO(da, &ao));
    /* PetscCall(AOView(ao,PETSC_VIEWER_STDOUT_WORLD)); */
    PetscCall(PetscMalloc1(nloc, &iglobal));

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
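    /* The w degrees of freedom per grid point are stored interleaved, so iloc below is the
       local index of the first dof at grid point (i,j,k) */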
    kk = 0;
    for (k = Zs; k < Zs + Zm; k++) {
      for (j = Ys; j < Ys + Ym; j++) {
        for (i = Xs; i < Xs + Xm; i++) {
          iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
          for (l = 0; l < w; l++) iglobal[kk++] = ltog[iloc + l];
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
    PetscCall(AOPetscToApplication(ao, nloc, iglobal));

    /* Then map the application ordering back to the PETSc DMDA ordering */
    PetscCall(AOApplicationToPetsc(ao, nloc, iglobal));
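    /* The round trip PetscToApplication -> ApplicationToPetsc is the identity, so iglobal
       must again match ltog entry by entry */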

    /* Verify the mappings */
    kk = 0;
    for (k = Zs; k < Zs + Zm; k++) {
      for (j = Ys; j < Ys + Ym; j++) {
        for (i = Xs; i < Xs + Xm; i++) {
          iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
          for (l = 0; l < w; l++) {
            if (iglobal[kk] != ltog[iloc + l]) {
              PetscCall(PetscPrintf(PETSC_COMM_WORLD, "[%d] Problem with mapping: k=%" PetscInt_FMT ", j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n", rank, k, j, i, l, ltog[iloc + l], iglobal[kk]));
            }
            kk++;
          }
        }
      }
    }
    PetscCall(PetscFree(iglobal));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
  }

  /* Free memory */
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
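/* Example invocations (a sketch; the binary name ./ex6 is assumed, adjust to your build):
     mpiexec -n 4 ./ex6 -testorder -nox
     mpiexec -n 6 ./ex6 -NX 12 -m 3 -n 2 -p 1 -distribute -testorder -nox
     mpiexec -n 2 ./ex6 -local_print -nox
   In the -distribute run, m*n*p must equal the number of processes and the grid must be
   large enough for the hand-built ownership ranges above to stay positive. */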

/*TEST

    test:
      args: -testorder -nox

 TEST*/