Actual source code: ex53.c
static char help[] = "Tests setup PCFIELDSPLIT with blocked IS.\n\n";
/*
   Contributed by Hoang Giang Bui, June 2017.
*/
#include <petscksp.h>
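
/*
   Command-line options read below:
     -mat_size   <n> : global matrix dimension (default 40)
     -method     <m> : m = 1 splits each rank's local rows evenly between fields "a" and "b";
                       m > 1 leaves field "b" empty on rank 0 (default 2)
     -block_size <b> : block size attached to both index sets (default 2)
*/
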
int main(int argc, char *argv[])
{
  Mat         A;
  KSP         ksp;
  PC          pc;
  PetscInt    Istart, Iend, local_m, local_n, i;
  PetscMPIInt rank;
  PetscInt    method = 2, mat_size = 40, block_size = 2, *A_indices = NULL, *B_indices = NULL, A_size = 0, B_size = 0;
  IS          A_IS, B_IS;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-mat_size", &mat_size, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-method", &method, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-block_size", &block_size, NULL));

  if (rank == 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, " matrix size = %" PetscInt_FMT ", block size = %" PetscInt_FMT "\n", mat_size, block_size));

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, mat_size, mat_size));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatSetUp(A));

  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
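
  /* Assemble a tridiagonal stencil (2 on the diagonal, -1 on the off-diagonals) over the locally owned rows */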
  for (i = Istart; i < Iend; ++i) {
    PetscCall(MatSetValue(A, i, i, 2, INSERT_VALUES));
    if (i < mat_size - 1) PetscCall(MatSetValue(A, i, i + 1, -1, INSERT_VALUES));
    if (i > 0) PetscCall(MatSetValue(A, i, i - 1, -1, INSERT_VALUES));
  }

  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  PetscCall(MatGetLocalSize(A, &local_m, &local_n));

  /* Create the index sets defining the two fields */
  if (rank == 0) {
    if (method > 1) {
      /* with method > 1, fieldsplit B is left empty on this rank: all local rows go to A */
      A_size = Iend - Istart;
      B_size = 0;
    } else {
      /* with method = 1, fieldsplits A and B get equal shares of the local rows; note that A_size (= N/4 here) must be divisible by block_size */
      A_size = (Iend - Istart) / 2;
      B_size = (Iend - Istart) / 2;
    }
    PetscCall(PetscCalloc1(A_size, &A_indices));
    PetscCall(PetscCalloc1(B_size, &B_indices));
    for (i = 0; i < A_size; ++i) A_indices[i] = Istart + i;
    for (i = 0; i < B_size; ++i) B_indices[i] = Istart + i + A_size;
  } else if (rank == 1) {
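    /* rank 1 always splits its local rows evenly between A and B, regardless of method */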
    A_size = (Iend - Istart) / 2;
    B_size = (Iend - Istart) / 2;
    PetscCall(PetscCalloc1(A_size, &A_indices));
    PetscCall(PetscCalloc1(B_size, &B_indices));
    for (i = 0; i < A_size; ++i) A_indices[i] = Istart + i;
    for (i = 0; i < B_size; ++i) B_indices[i] = Istart + i + A_size;
  }
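
  /* PETSC_OWN_POINTER hands the index arrays over to the IS objects, which will free them in ISDestroy() */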
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, A_size, A_indices, PETSC_OWN_POINTER, &A_IS));
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, B_size, B_indices, PETSC_OWN_POINTER, &B_IS));
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d]: A_size = %" PetscInt_FMT ", B_size = %" PetscInt_FMT "\n", rank, A_size, B_size));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

  /* Create the solver; only its setup is exercised below, the system is never solved */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetType(ksp, KSPGMRES));
  PetscCall(KSPSetOperators(ksp, A, A));

  /* Define the fieldsplit for the global matrix */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetIS(pc, "a", A_IS));
  PetscCall(PCFieldSplitSetIS(pc, "b", B_IS));
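  /* note that the block sizes are attached only after the ISs have been handed to the PC; the blocked-IS setup path is what this test targets */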
  PetscCall(ISSetBlockSize(A_IS, block_size));
  PetscCall(ISSetBlockSize(B_IS, block_size));

  PetscCall(KSPSetFromOptions(ksp));
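  /* KSPSetUp() triggers the PCFIELDSPLIT setup with the blocked ISs; no KSPSolve() follows */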
  PetscCall(KSPSetUp(ksp));

  PetscCall(ISDestroy(&A_IS));
  PetscCall(ISDestroy(&B_IS));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
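
/*
   Example run (a sketch; the exact launcher depends on the MPI installation):
     mpiexec -n 2 ./ex53 -method 1 -block_size 2 -ksp_view
   where -ksp_view prints the solver and PC configuration so the fieldsplit blocks can be inspected.
*/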

/*TEST

   test:
      nsize: 2
      args: -method 1

   test:
      suffix: 2
      nsize: 2
      args: -method 2

TEST*/