Actual source code: ex82.c

static char help[] = "Partition a tiny grid using hierarchical partitioning.\n\n";
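
/*
   This example must be run with exactly 4 MPI processes, e.g. (using the standard launcher):
       mpiexec -n 4 ./ex82
*/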

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A;                /* adjacency structure of the grid graph */
  PetscMPIInt     rank, size;
  PetscInt       *ia, *ja;          /* local CSR row pointers and column indices */
  MatPartitioning part;
  IS              is, isn, isrows;
  IS              coarseparts, fineparts;
  MPI_Comm        comm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCheck(size == 4, comm, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

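  /*
     The adjacency graph is a 4 x 4 grid with 16 vertices. Each process owns 4
     consecutive vertices and supplies its local rows in CSR form: ia[] holds the
     row pointers and ja[] the global indices of the neighboring vertices.
  */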
  PetscCall(PetscMalloc1(5, &ia));
  PetscCall(PetscMalloc1(16, &ja));
  if (rank == 0) { /* rows for vertices 0-3 */
    ja[0] = 1;
    ja[1] = 4;
    ja[2] = 0;
    ja[3] = 2;
    ja[4] = 5;
    ja[5] = 1;
    ja[6] = 3;
    ja[7] = 6;
    ja[8] = 2;
    ja[9] = 7;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
  } else if (rank == 1) { /* rows for vertices 4-7 */
    ja[0]  = 0;
    ja[1]  = 5;
    ja[2]  = 8;
    ja[3]  = 1;
    ja[4]  = 4;
    ja[5]  = 6;
    ja[6]  = 9;
    ja[7]  = 2;
    ja[8]  = 5;
    ja[9]  = 7;
    ja[10] = 10;
    ja[11] = 3;
    ja[12] = 6;
    ja[13] = 11;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
  } else if (rank == 2) { /* rows for vertices 8-11 */
    ja[0]  = 4;
    ja[1]  = 9;
    ja[2]  = 12;
    ja[3]  = 5;
    ja[4]  = 8;
    ja[5]  = 10;
    ja[6]  = 13;
    ja[7]  = 6;
    ja[8]  = 9;
    ja[9]  = 11;
    ja[10] = 14;
    ja[11] = 7;
    ja[12] = 10;
    ja[13] = 15;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
  } else { /* rows for vertices 12-15 */
    ja[0] = 8;
    ja[1] = 13;
    ja[2] = 9;
    ja[3] = 12;
    ja[4] = 14;
    ja[5] = 10;
    ja[6] = 13;
    ja[7] = 15;
    ja[8] = 11;
    ja[9] = 14;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
  }
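  /*
     Create a parallel adjacency matrix from the CSR arrays. Note that MatCreateMPIAdj()
     takes ownership of ia and ja (they are freed when the matrix is destroyed), so they
     are not freed explicitly in this example.
  */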
  PetscCall(MatCreateMPIAdj(comm, 4, 16, ia, ja, NULL, &A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
  /*
   Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, 2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part, 2));
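  /* 2 coarse parts x 2 fine parts = 4 final parts, one per MPI process */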
  PetscCall(MatPartitioningSetFromOptions(part));
  /* compute the new owning process for each vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* view the coarse-level parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts));
  PetscCall(ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD));
  /* view the fine-level parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part, &fineparts));
  PetscCall(ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD));
  /* view the combined partitioning */
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  /* map each old global number to its new global number */
  PetscCall(ISPartitioningToNumbering(is, &isn));
  PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
  /* gather, on each process, the old global indices of the vertices it will own */
  PetscCall(ISBuildTwoSided(is, NULL, &isrows));
  PetscCall(ISView(isrows, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      requires: parmetis
      TODO: tests cannot use parmetis because it produces different results on different machines

TEST*/