Actual source code: ex87.c

#include <petscksp.h>

static char help[] = "Solves a saddle-point linear system using PCHPDDM.\n\n";

static PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt rank, PetscMPIInt size);
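
/* this example must be run on exactly 4 processes; a typical invocation (the data directory below is
 * only illustrative, the TEST block at the end of this file lists the option sets actually tested):
 *   mpiexec -n 4 ./ex87 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes -ksp_monitor
 */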

int main(int argc, char **args)
{
  Vec               b;               /* right-hand side, overwritten in place by the computed solution */
  Mat               A[4], aux[2], S; /* blocks of the saddle-point matrix, auxiliary subdomain matrices, and global MatNest */
  KSP               ksp, *subksp;    /* linear solver context */
  PC                pc;
  IS                is[2];
  PetscMPIInt       rank, size;
  PetscInt          m, M, n, N, id = 0;
  PetscViewer       viewer;
  const char *const system[] = {"elasticity", "stokes"};
  /* "elasticity":
   *    2D linear elasticity with rubber-like and steel-like material coefficients, i.e., Poisson's ratio \in {0.4999, 0.35} and Young's modulus \in {0.01 GPa, 200.0 GPa},
   *      discretized by order 2 (resp. 0) Lagrange finite elements in displacements (resp. pressure) on a triangle mesh
   * "stokes":
   *    2D lid-driven cavity with constant viscosity,
   *      discretized by order 2 (resp. 1) Lagrange finite elements, i.e., lowest-order Taylor--Hood finite elements, in velocities (resp. pressure) on a triangle mesh;
   *      if the option -empty_A11 is not set (or set to false), a pressure field with zero mean value is computed
   */
  char      dir[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN];
  PetscBool flg[3] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_USER, "This example requires 4 processes");
  PetscCall(PetscOptionsGetEList(NULL, NULL, "-system", system, PETSC_STATIC_ARRAY_LENGTH(system), &id, NULL));
  if (id == 1) PetscCall(PetscOptionsGetBool(NULL, NULL, "-empty_A11", flg, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
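  /* A[0] and A[3] will hold the (0,0) and (1,1) blocks of the saddle-point matrix, A[1] and A[2] the off-diagonal blocks;
     is[] and aux[] hold, for each diagonal block, the local index set and auxiliary matrix used by PCHPDDM for the corresponding field split */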
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + (i ? 3 : 0)));
    PetscCall(ISCreate(PETSC_COMM_SELF, is + i));
    PetscCall(MatCreate(PETSC_COMM_SELF, aux + i));
  }
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
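  /* data files are prefixed with "A" for the elasticity system and with "B" for the Stokes system */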
  /* loading matrices and auxiliary data for the diagonal blocks */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s", dir, id == 1 ? "B" : "A"));
  PetscCall(MatAndISLoad(prefix, "00", A[0], is[0], aux[0], rank, size));
  PetscCall(MatAndISLoad(prefix, "11", A[3], is[1], aux[1], rank, size));
  /* loading the (1,0) off-diagonal block with a row/column layout consistent with the diagonal blocks */
  PetscCall(MatCreate(PETSC_COMM_WORLD, A + 2));
  PetscCall(MatGetLocalSize(A[0], &n, NULL));
  PetscCall(MatGetSize(A[0], &N, NULL));
  PetscCall(MatGetLocalSize(A[3], &m, NULL));
  PetscCall(MatGetSize(A[3], &M, NULL));
  PetscCall(MatSetSizes(A[2], m, n, M, N));
  PetscCall(MatSetUp(A[2]));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s10.dat", dir, id == 1 ? "B" : "A"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A[2], viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  /* the (0,1) block is the (Hermitian) transpose of the loaded (1,0) block: implicit by default, or stored explicitly with -permute, in which case the (1,0) block becomes implicit instead */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", flg + 1, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-permute", flg + 2, NULL));
  if (flg[1]) {
    if (!flg[2]) PetscCall(MatCreateTranspose(A[2], A + 1));
    else {
      PetscCall(MatTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
      PetscCall(MatDestroy(A + 2));
      PetscCall(MatCreateTranspose(A[1], A + 2));
    }
  } else {
    if (!flg[2]) PetscCall(MatCreateHermitianTranspose(A[2], A + 1));
    else {
      PetscCall(MatHermitianTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
      PetscCall(MatDestroy(A + 2));
      PetscCall(MatCreateHermitianTranspose(A[1], A + 2));
    }
  }
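  /* with -empty_A11, the (1,1) block is destroyed so that the MatNest below is assembled with a NULL (1,1) block */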
  if (flg[0]) PetscCall(MatDestroy(A + 3));
  /* global coefficient matrix, assembled as a 2-by-2 MatNest */
  PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, S, S));
  PetscCall(KSPGetPC(ksp, &pc));
  /* outer preconditioner */
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  PetscCall(PCSetUp(pc));
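  /* the outer PC has been set up above so that the field-split sub-KSP contexts exist and can be retrieved */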
  PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
  PetscCall(KSPGetPC(subksp[0], &pc));
  /* inner preconditioner associated with the top-left block */
  PetscCall(PCSetType(pc, PCHPDDM));
  PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
  PetscCall(PCSetFromOptions(pc));
  PetscCall(KSPGetPC(subksp[1], &pc));
  /* inner preconditioner associated with the Schur complement, which will be set internally to a PCKSP */
  PetscCall(PCSetType(pc, PCHPDDM));
  if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
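  /* no auxiliary matrix is attached to the (1,1) split when the (1,1) block is empty (-empty_A11) */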
  PetscCall(PCSetFromOptions(pc));
  PetscCall(PetscFree(subksp));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(MatCreateVecs(S, &b, NULL));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/rhs_%s.dat", dir, id == 1 ? "B" : "A"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(b, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
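  /* in-place solve: b is overwritten by the computed solution */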
  PetscCall(KSPSolve(ksp, b, b));
  flg[0] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", flg, NULL));
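  /* pc still refers to the inner PC of the Schur-complement sub-KSP retrieved above */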
  if (flg[0]) PetscCall(PCView(pc, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&S));
  PetscCall(MatDestroy(A + 1));
  PetscCall(MatDestroy(A + 2));
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatDestroy(A + (i ? 3 : 0)));
    PetscCall(MatDestroy(aux + i));
    PetscCall(ISDestroy(is + i));
  }
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt rank, PetscMPIInt size)
{
  IS              sizes;
  const PetscInt *idx;
  PetscViewer     viewer;
  char            name[PETSC_MAX_PATH_LEN];

  PetscFunctionBeginUser;
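  /* each block is stored in four files: <prefix><identifier>_sizes_<rank>_<size>.dat (local and global dimensions),
     <prefix><identifier>.dat (the distributed matrix), <prefix><identifier>_is_<rank>_<size>.dat (the index set of the
     overlapping subdomain in the global numbering), and <prefix><identifier>_aux_<rank>_<size>.dat (the sequential
     auxiliary matrix used by PCHPDDM) */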
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_sizes_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISGetIndices(sizes, &idx));
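  /* idx[0] and idx[1] are the local row and column sizes, idx[2] and idx[3] the global row and column sizes */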
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
  PetscCall(MatSetUp(A));
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s.dat", prefix, identifier));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_is_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISLoad(is, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_aux_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(aux, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      requires: hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)

   testset:
      requires: datafilespath
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2
      test:
        requires: mumps
        suffix: 1
        args: -viewer -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
        filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows="
      test:
        requires: mumps
        suffix: 2
        output_file: output/ex87_1_system-stokes.out
        args: -viewer -system stokes -empty_A11 -transpose {{false true}shared output} -permute {{false true}shared output} -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
        filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows=" | sed -e "s/      right preconditioning/      left preconditioning/g" -e "s/      using UNPRECONDITIONED/      using PRECONDITIONED/g"
      test:
        suffix: 1_petsc
        args: -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -permute
      test:
        suffix: 2_petsc
        output_file: output/ex87_1_petsc_system-stokes.out
        args: -system stokes -empty_A11 -transpose -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks
        filter: sed -e "s/type: transpose/type: hermitiantranspose/g"
      test:
        suffix: threshold
        output_file: output/ex87_1_petsc_system-elasticity.out
        args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output}

TEST*/