Actual source code: ex73.c

static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h       - base PETSc routines   petscvec.h - vectors
     petscmat.h - matrices
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis/ptscotch -viewer_binary_skip_info -nox
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A, B;                     /* matrix */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg, viewMats, viewIS, viewVecs, useND, noVecLoad = PETSC_FALSE;
  PetscInt       *nlocal, m, n;
  PetscMPIInt     rank, size;
  MatPartitioning part;
  IS              is, isn;
  Vec             xin, xout;
  VecScatter      scat;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_mats", &viewMats));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_is", &viewIS));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_vecs", &viewVecs));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-use_nd", &useND));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-novec_load", &noVecLoad));
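  /*
     The options above are simple presence flags: -view_mats, -view_is, and -view_vecs
     display the intermediate objects, -use_nd selects the nested-dissection variant of
     the partitioning below, and -novec_load skips reading a vector from the input file
     (a random one is used instead).
  */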

  /*
     Determine file from which we read the matrix
  */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
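  /*
     Note that flg is not checked here, so if -f is omitted the file name is left unset
     and the PetscViewerBinaryOpen() call below errors out.  A typical run is given in
     the usage line of the header comment.
  */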

  /*
     Open binary file.  Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));

  /*
      Load the matrix and vector; then destroy the viewer.
  */
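  /*
      The matrix is read with the type set above (MATMPIAIJ), so its rows are distributed
      across the processes.  With -novec_load a random vector compatible with A is created
      instead of being read from the file.
  */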
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, mtype));
  PetscCall(MatLoad(A, fd));
  if (!noVecLoad) {
    PetscCall(VecCreate(PETSC_COMM_WORLD, &xin));
    PetscCall(VecLoad(xin, fd));
  } else {
    PetscCall(MatCreateVecs(A, &xin, NULL));
    PetscCall(VecSetRandom(xin, NULL));
  }
  PetscCall(PetscViewerDestroy(&fd));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original matrix:\n"));
    PetscCall(MatView(A, PETSC_VIEWER_DRAW_WORLD));
  }
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original vector:\n"));
    PetscCall(VecView(xin, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Partition the graph of the matrix */
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetFromOptions(part));
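  /*
     The partitioning package is chosen at run time with -mat_partitioning_type
     (e.g. parmetis or ptscotch), as in the usage line of the header comment.
  */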

  /* get new processor owner number of each vertex */
  if (useND) {
    PetscCall(MatPartitioningApplyND(part, &is));
  } else {
    PetscCall(MatPartitioningApply(part, &is));
  }
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS1 - new processor ownership:\n"));
    PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* get new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is, &isn));
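  /*
     isn gives the new global number of each old global row; the new numbering groups
     together the rows assigned to each rank (rank 0 first, then rank 1, and so on).
  */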
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS2 - new global numbering:\n"));
    PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* get number of new vertices for each processor */
  PetscCall(PetscMalloc1(size, &nlocal));
  PetscCall(ISPartitioningCount(is, size, nlocal));
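  /* nlocal[r] is the number of rows assigned to rank r; nlocal[rank] sizes the
     inverted permutation below */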
  PetscCall(ISDestroy(&is));

  /* get old global number of each new global number */
  PetscCall(ISInvertPermutation(isn, useND ? PETSC_DECIDE : nlocal[rank], &is));
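  /*
     The inverse lists, for each new global number, the old global number it corresponds
     to, so it can be passed directly as the row and column index set to
     MatCreateSubMatrix() below.
  */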
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS3=inv(IS2) - old global number of each new global number:\n"));
    PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
  PetscCall(MatCreateSubMatrix(A, is, is, MAT_INITIAL_MATRIX, &B));
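  /*
     Using the same index set for rows and columns applies the permutation symmetrically:
     B is A with its rows and columns renumbered and redistributed according to the
     partitioning.
  */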
  PetscCall(PetscFree(nlocal));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatDestroy(&A));
  PetscCall(MatPartitioningDestroy(&part));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Partitioned matrix:\n"));
    PetscCall(MatView(B, PETSC_VIEWER_DRAW_WORLD));
  }

  /* move the vector rows to the new processes they have been assigned to */
  PetscCall(MatGetLocalSize(B, &m, &n));
  PetscCall(VecCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, m, PETSC_DECIDE, &xout));
  PetscCall(VecScatterCreate(xin, is, xout, NULL, &scat));
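  /*
     The scatter copies xin[is[k]] (old numbering) into entry k of xout (new numbering),
     so xout is xin permuted consistently with B.
  */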
  PetscCall(VecScatterBegin(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scat));
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Mapped vector:\n"));
    PetscCall(VecView(xout, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscCall(VecDestroy(&xout));
  PetscCall(ISDestroy(&is));

  {
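    /*
       This block expands the nonzero structure of B into a matrix J of twice the size:
       each row/column i of B becomes rows/columns 2*i and 2*i+1 of J (as for a problem
       with two degrees of freedom per node).  Only the structure matters here, so the
       inserted values are all zero.
    */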
    PetscInt           rstart, i, *nzd, *nzo, nzl, nzmax = 0, *ncols, nrow, j;
    Mat                J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
    PetscCall(PetscCalloc2(2 * m, &nzd, 2 * m, &nzo));
    for (i = 0; i < m; i++) {
      PetscCall(MatGetRow(B, i + rstart, &nzl, &cols, NULL));
      for (j = 0; j < nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart + n) {
          nzd[2 * i] += 2;
          nzd[2 * i + 1] += 2;
        } else {
          nzo[2 * i] += 2;
          nzo[2 * i + 1] += 2;
        }
      }
      nzmax = PetscMax(nzmax, nzd[2 * i] + nzo[2 * i]);
      PetscCall(MatRestoreRow(B, i + rstart, &nzl, &cols, NULL));
    }
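    /* nzd and nzo now hold the diagonal-block and off-diagonal-block nonzero counts for
       each of the 2*m new local rows (two entries per original nonzero), and nzmax bounds
       the length of the longest expanded row */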
    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, 2 * m, 2 * m, PETSC_DECIDE, PETSC_DECIDE, 0, nzd, 0, nzo, &J));
    PetscCall(PetscInfo(0, "Created empty Jacobian matrix\n"));
    PetscCall(PetscFree2(nzd, nzo));
    PetscCall(PetscMalloc2(nzmax, &ncols, nzmax, &nvals));
    PetscCall(PetscArrayzero(nvals, nzmax));
    for (i = 0; i < m; i++) {
      PetscCall(MatGetRow(B, i + rstart, &nzl, &cols, &vals));
      for (j = 0; j < nzl; j++) {
        ncols[2 * j]     = 2 * cols[j];
        ncols[2 * j + 1] = 2 * cols[j] + 1;
      }
      nrow = 2 * (i + rstart);
      PetscCall(MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES));
      nrow = 2 * (i + rstart) + 1;
      PetscCall(MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES));
      PetscCall(MatRestoreRow(B, i + rstart, &nzl, &cols, &vals));
    }
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
    if (viewMats) {
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Jacobian matrix structure:\n"));
      PetscCall(MatView(J, PETSC_VIEWER_DRAW_WORLD));
    }
    PetscCall(MatDestroy(&J));
    PetscCall(PetscFree2(ncols, nvals));
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&B));
  PetscCall(VecDestroy(&xin));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/