Actual source code: ex37.c

static char help[] = "Test MatGetMultiProcBlock() \n\
Reads a PETSc matrix from a file and solves linear systems on subcommunicators.\n\n";
/*
  Example:
  mpiexec -n 4 ./ex37 -f <input_file> -nsubcomm 2 -psubcomm_view -subcomm_type <0, 1 or 2>
*/
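/*
  The -f file must contain a matrix in PETSc binary format. A minimal sketch of how such a
  file could be written (assumes some already-assembled Mat A; the name "A.dat" is arbitrary):

    PetscViewer vout;
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "A.dat", FILE_MODE_WRITE, &vout));
    PetscCall(MatView(A, vout));
    PetscCall(PetscViewerDestroy(&vout));
*/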

#include <petscksp.h>
#include <petscsys.h>

int main(int argc, char **args)
{
  KSP          subksp;
  Mat          A, subA;
  Vec          x, b, u, subb, subx, subu;
  PetscViewer  fd;
  char         file[PETSC_MAX_PATH_LEN];
  PetscBool    flg;
  PetscInt     i, m, n, its;
  PetscReal    norm;
  PetscMPIInt  rank, size;
  MPI_Comm     comm, subcomm;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm = 1, id;
  PetscScalar *barray, *xarray, *uarray, *array, one = 1.0;
  PetscInt     type = 1;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  /* Determine the file from which to read the matrix */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must indicate binary file with the -f option");
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));

  /* Load the matrix; then destroy the viewer */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* Create rhs vector b */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &b));
  PetscCall(VecSetSizes(b, m, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(b));
  PetscCall(VecSet(b, one));

  PetscCall(VecDuplicate(b, &x));
  PetscCall(VecDuplicate(b, &u));
  PetscCall(VecSet(x, 0.0));
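  /* x holds the initial guess and solution; u is a work vector whose local array will back
     the per-block residual vector subu further below */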

  /* Test MatGetMultiProcBlock() */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nsubcomm", &nsubcomm, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-subcomm_type", &type, NULL)); /* 0 = general, 1 = contiguous (default), 2 = interlaced */

  PetscCall(PetscSubcommCreate(comm, &psubcomm));
  PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
  if (type == PETSC_SUBCOMM_GENERAL) { /* user computes a color and subrank, here from the reversed rank ordering duprank */
    PetscMPIInt color, subrank, duprank, subsize;
    duprank = size - 1 - rank;
    subsize = size / nsubcomm;
    PetscCheck(subsize * nsubcomm == size, comm, PETSC_ERR_SUP, "This example requires nsubcomm %" PetscInt_FMT " divides size %d", nsubcomm, size);
    color   = duprank / subsize;
    subrank = duprank - color * subsize;
    PetscCall(PetscSubcommSetTypeGeneral(psubcomm, color, subrank));
  } else if (type == PETSC_SUBCOMM_CONTIGUOUS) {
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
  } else if (type == PETSC_SUBCOMM_INTERLACED) {
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_INTERLACED));
  } else SETERRQ(psubcomm->parent, PETSC_ERR_SUP, "PetscSubcommType %" PetscInt_FMT " is not supported yet", type);
  PetscCall(PetscSubcommSetFromOptions(psubcomm));
  subcomm = PetscSubcommChild(psubcomm);
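  /*
     For reference, the standard PetscSubcomm rank layouts with size = 4 and nsubcomm = 2:
       PETSC_SUBCOMm_CONTIGUOUS groups ranks {0,1} and {2,3};
       PETSC_SUBCOMM_INTERLACED groups ranks {0,2} and {1,3}.
  */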

  /* Test a matrix living on a user-defined subcommunicator obtained directly from MPI_Comm_split() */
  if (size > 1) {
    PetscMPIInt subrank = -1, color = -1;
    MPI_Comm    dcomm, splitcomm;

    if (rank == 0) {
      color   = 0;
      subrank = 0;
    } else if (rank == 1) {
      color   = 0;
      subrank = 1;
    } else {
      color   = 1;
      subrank = 0;
    }

    /* split into a separate communicator so the PetscSubcomm child held in subcomm stays intact */
    PetscCall(PetscCommDuplicate(PETSC_COMM_WORLD, &dcomm, NULL));
    PetscCallMPI(MPI_Comm_split(dcomm, color, subrank, &splitcomm));

    PetscCall(MatCreate(splitcomm, &subA));
    PetscCall(MatSetSizes(subA, PETSC_DECIDE, PETSC_DECIDE, 10, 10));
    PetscCall(MatSetFromOptions(subA));
    PetscCall(MatSetUp(subA));
    PetscCall(MatAssemblyBegin(subA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(subA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatZeroEntries(subA));
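    /* subA was assembled with no entries, so the MatMult() below is a smoke test of the
       parallel layout on splitcomm: it must produce the zero vector */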

    /* Test MatMult() */
    PetscCall(MatCreateVecs(subA, &subx, &subb));
    PetscCall(VecSet(subx, 1.0));
    PetscCall(MatMult(subA, subx, subb));

    PetscCall(VecDestroy(&subx));
    PetscCall(VecDestroy(&subb));
    PetscCall(MatDestroy(&subA));
    PetscCallMPI(MPI_Comm_free(&splitcomm));
    PetscCall(PetscCommDestroy(&dcomm));
  }

  /* Create subA */
  PetscCall(MatGetMultiProcBlock(A, subcomm, MAT_INITIAL_MATRIX, &subA));
  PetscCall(MatGetMultiProcBlock(A, subcomm, MAT_REUSE_MATRIX, &subA));
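  /* The second call deliberately exercises the MAT_REUSE_MATRIX path: instead of creating
     a new matrix it refills the subA obtained from the MAT_INITIAL_MATRIX call */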

  /* Create sub vectors without arrays. Place b's and x's local arrays into subb and subx */
  PetscCall(MatGetLocalSize(subA, &m, &n));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, m, PETSC_DECIDE, NULL, &subb));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &subx));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &subu));
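  /* Passing NULL as the array means no storage is attached yet; VecPlaceArray() below
     provides it */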

  PetscCall(VecGetArray(b, &barray));
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArray(u, &uarray));
  PetscCall(VecPlaceArray(subb, barray));
  PetscCall(VecPlaceArray(subx, xarray));
  PetscCall(VecPlaceArray(subu, uarray));
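  /* subb, subx and subu now alias the local memory of b, x and u: the subcommunicator
     solves read and write the global vectors directly, with no copies. The placed arrays
     must be reset with VecResetArray() before VecRestoreArray() is called on b, x and u. */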

  /* Create linear solvers associated with subA */
  PetscCall(KSPCreate(subcomm, &subksp));
  PetscCall(KSPSetOperators(subksp, subA, subA));
  PetscCall(KSPSetFromOptions(subksp));
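  /* Each subcommunicator builds its own KSP from the same per-process options database, so
     options such as -pc_type lu (see the mumps test below) configure every block solver alike */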

  /* Solve sub systems */
  PetscCall(KSPSolve(subksp, subb, subx));
  PetscCall(KSPGetIterationNumber(subksp, &its));

  /* Check the residual; u's local array aliases each block's subu, so this norm taken on
     the parent communicator accumulates the residuals of all blocks */
  PetscCall(MatMult(subA, subx, subu));
  PetscCall(VecAXPY(subu, -1.0, subb));
  PetscCall(VecNorm(u, NORM_2, &norm));
  if (norm > 1.e-4 && rank == 0) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "[%d]  Number of iterations = %3" PetscInt_FMT "\n", rank, its));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error: Residual norm over all blocks |subb - subA*subx| = %g\n", (double)norm));
  }
  /* Optionally view one process's local part of subb and subx while their placed arrays are still valid */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-subvec_view", &id, &flg));
  if (flg && rank == id) {
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] subb:\n", rank));
    PetscCall(VecGetArray(subb, &array));
    for (i = 0; i < m; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g\n", (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(subb, &array));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] subx:\n", rank));
    PetscCall(VecGetArray(subx, &array));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g\n", (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(subx, &array));
  }

  PetscCall(VecResetArray(subb));
  PetscCall(VecResetArray(subx));
  PetscCall(VecResetArray(subu));

  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArray(b, &barray));
  PetscCall(VecRestoreArray(u, &uarray));
  PetscCall(MatDestroy(&subA));
  PetscCall(VecDestroy(&subb));
  PetscCall(VecDestroy(&subx));
  PetscCall(VecDestroy(&subu));
  PetscCall(KSPDestroy(&subksp));
  PetscCall(PetscSubcommDestroy(&psubcomm)); /* also frees the child communicator held in subcomm */
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

    test:
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: mumps
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -pc_factor_mat_solver_type mumps -pc_type lu
      requires: datafilespath mumps !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: 3
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 0
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 4
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 5
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

TEST*/