Actual source code: ex83.c

static char help[] = "Partition a tiny grid using hierarchical partitioning and increase the overlap using MatIncreaseOverlapSplit.\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A, B;
  PetscMPIInt     rank, size, membershipKey;
  PetscInt       *ia, *ja, *indices_sc, isrows_localsize;
  const PetscInt *indices;
  MatPartitioning part;
  IS              is, isrows, isrows_sc;
  IS              coarseparts, fineparts;
  MPI_Comm        comm, scomm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCheck(size == 4, comm, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /*
     Set up a small matrix: each of the 4 processes owns 4 rows of the
     adjacency graph of a 4x4 grid (16 vertices), stored in CSR form
     (ia = row pointers, ja = column indices).
  */
  PetscCall(PetscMalloc1(5, &ia));
  PetscCall(PetscMalloc1(16, &ja));
  if (rank == 0) {
    ja[0]         = 1;
    ja[1]         = 4;
    ja[2]         = 0;
    ja[3]         = 2;
    ja[4]         = 5;
    ja[5]         = 1;
    ja[6]         = 3;
    ja[7]         = 6;
    ja[8]         = 2;
    ja[9]         = 7;
    ia[0]         = 0;
    ia[1]         = 2;
    ia[2]         = 5;
    ia[3]         = 8;
    ia[4]         = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0]         = 0;
    ja[1]         = 5;
    ja[2]         = 8;
    ja[3]         = 1;
    ja[4]         = 4;
    ja[5]         = 6;
    ja[6]         = 9;
    ja[7]         = 2;
    ja[8]         = 5;
    ja[9]         = 7;
    ja[10]        = 10;
    ja[11]        = 3;
    ja[12]        = 6;
    ja[13]        = 11;
    ia[0]         = 0;
    ia[1]         = 3;
    ia[2]         = 7;
    ia[3]         = 11;
    ia[4]         = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0]         = 4;
    ja[1]         = 9;
    ja[2]         = 12;
    ja[3]         = 5;
    ja[4]         = 8;
    ja[5]         = 10;
    ja[6]         = 13;
    ja[7]         = 6;
    ja[8]         = 9;
    ja[9]         = 11;
    ja[10]        = 14;
    ja[11]        = 7;
    ja[12]        = 10;
    ja[13]        = 15;
    ia[0]         = 0;
    ia[1]         = 3;
    ia[2]         = 7;
    ia[3]         = 11;
    ia[4]         = 14;
    membershipKey = 1;
  } else {
    ja[0]         = 8;
    ja[1]         = 13;
    ja[2]         = 9;
    ja[3]         = 12;
    ja[4]         = 14;
    ja[5]         = 10;
    ja[6]         = 13;
    ja[7]         = 15;
    ja[8]         = 11;
    ja[9]         = 14;
    ia[0]         = 0;
    ia[1]         = 2;
    ia[2]         = 5;
    ia[3]         = 8;
    ia[4]         = 10;
    membershipKey = 1;
  }
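  /* MatCreateMPIAdj() does not copy ia and ja; the matrix takes ownership of them
     and frees them when it is destroyed, which is why there is no PetscFree() for
     ia/ja below */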
  PetscCall(MatCreateMPIAdj(comm, 4, 16, ia, ja, NULL, &A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
  /*
   Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, 2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part, 2));
  PetscCall(MatPartitioningSetFromOptions(part));
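  /* with 2 coarse parts, each split into 2 fine parts, the hierarchical
     partitioner produces 2 x 2 = 4 parts in total, one per MPI rank */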
  /* get the new owner process of each vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* coarse parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts));
  PetscCall(ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD));
  /* fine parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part, &fineparts));
  PetscCall(ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD));
  /* partitioning */
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  /* compute the rows that will belong to this process under the new partitioning */
  PetscCall(ISBuildTwoSided(is, NULL, &isrows));
  PetscCall(ISView(isrows, PETSC_VIEWER_STDOUT_WORLD));
  /* create a sub-communicator: membershipKey is 0 on ranks 0-1 and 1 on ranks 2-3,
     so comm is split into two sub-communicators of two processes each */
  PetscCallMPI(MPI_Comm_split(comm, membershipKey, rank, &scomm));
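  /* copy the indices of isrows into a freshly allocated array; ISCreateGeneral()
     below is given this array with PETSC_OWN_POINTER, so the new IS takes
     ownership and frees it */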
  PetscCall(ISGetLocalSize(isrows, &isrows_localsize));
  PetscCall(PetscMalloc1(isrows_localsize, &indices_sc));
  PetscCall(ISGetIndices(isrows, &indices));
  PetscCall(PetscArraycpy(indices_sc, indices, isrows_localsize));
  PetscCall(ISRestoreIndices(isrows, &indices));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(MatPartitioningDestroy(&part));
  /* create a sub-IS on the sub-communicator */
  PetscCall(ISCreateGeneral(scomm, isrows_localsize, indices_sc, PETSC_OWN_POINTER, &isrows_sc));
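  /* convert the adjacency matrix to an assembled MPIAIJ matrix; MatIncreaseOverlapSplit()
     below operates on this AIJ matrix (only the sparsity pattern matters for the overlap) */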
  PetscCall(MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &B));
  PetscCall(MatView(B, PETSC_VIEWER_STDOUT_WORLD));
  /* increase the overlap of the row IS by one level, across the sub-communicator */
  PetscCall(MatIncreaseOverlapSplit(B, 1, &isrows_sc, 1));
  PetscCall(ISView(isrows_sc, NULL));
  PetscCall(ISDestroy(&isrows_sc));
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}
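/*
   Note: the PetscCheck() at the top of main() requires exactly four MPI ranks,
   so run the example with, for instance (assuming the executable is built as ex83):
       mpiexec -n 4 ./ex83
   Depending on how PETSc was configured, the hierarchical partitioner may rely on
   an external package such as ParMETIS.
*/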