Actual source code: ex167.c

static char help[] = "Extract submatrices using unsorted indices. For SEQSBAIJ either sort both rows and columns, or sort none.\n\n";
/*
   Take a 4x4 grid and form a 5-point stencil graph Laplacian over it.
   Partition the grid into two subdomains by splitting it in two in the j-direction (the slowest varying).
   Impose an overlap of 1 and order the subdomains with the j-direction varying fastest.
   Extract the subdomain submatrices, one per rank.
*/
/* Results:
    Sequential:
    - seqaij:   will error out if rows or columns are unsorted
    - seqbaij:  will extract submatrices correctly even for unsorted row or column indices
    - seqsbaij: will extract submatrices correctly even for unsorted row and column indices (both must be sorted or both unsorted);
                CANNOT automatically report inversions, because MatGetRow is not available
    MPI:
    - mpiaij:   will error out if columns are unsorted
    - mpibaij:  will error out if columns are unsorted
    - mpisbaij: will error out if columns are unsorted; with unsorted rows it will still produce correct submatrices;
                CANNOT automatically report inversions, because MatGetRow is not available
*/
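
/*
   Example runs (a sketch; the executable name ex167 is assumed from the file name;
   -mat_type is consumed by MatSetFromOptions, while -sort_rows, -sort_cols, and
   -show_inversions are the options read below):
     ./ex167 -mat_type seqbaij -show_inversions
     mpiexec -n 2 ./ex167 -mat_type mpiaij -sort_rows -sort_cols
*/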

#include <petscmat.h>
#include <petscis.h>

int main(int argc, char **args)
{
  Mat             A, *S;
  IS              rowis[2], colis[2];
  PetscInt        n, N, i, j, k, l, nsub, Jlow[2] = {0, 1}, *jlow, Jhigh[2] = {3, 4}, *jhigh, row, col, *subindices, ncols;
  const PetscInt *cols;
  PetscScalar     v;
  PetscMPIInt     rank, size, p, inversions, total_inversions;
  PetscBool       sort_rows, sort_cols, show_inversions;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size < 3, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "A uniprocessor or two-processor example only.");

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  if (size > 1) {
    n = 8;
    N = 16;
  } else {
    n = 16;
    N = 16;
  }
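  /* With two ranks each rank owns 8 of the 16 global rows; a single rank owns all 16. */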
  PetscCall(MatSetSizes(A, n, n, N, N));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  /* Don't care if the entries are set multiple times by different procs. */
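  /* 5-point stencil on the 4x4 grid: grid point (i,j) maps to row j*4 + i; each neighbor contributes -1 and the diagonal gets 4. */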
  for (i = 0; i < 4; ++i) {
    for (j = 0; j < 4; ++j) {
      row = j * 4 + i;
      v   = -1.0;
      if (i > 0) {
        col = row - 1;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (i < 3) {
        col = row + 1;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (j > 0) {
        col = row - 4;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (j < 3) {
        col = row + 4;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      v = 4.0;
      PetscCall(MatSetValues(A, 1, &row, 1, &row, &v, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original matrix\n"));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));

  if (size > 1) {
    nsub = 1; /* one subdomain per rank */
  } else {
    nsub = 2; /* both subdomains on rank 0 */
  }
  if (rank) {
    jlow  = Jlow + 1;
    jhigh = Jhigh + 1;
  } else {
    jlow  = Jlow;
    jhigh = Jhigh;
  }
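  /* The non-overlapping partition in j is {0,1} and {2,3}; the overlap of 1 extends these to j in [0,3) and j in [1,4). */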
  sort_rows = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-sort_rows", &sort_rows, NULL));
  sort_cols = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-sort_cols", &sort_cols, NULL));
  for (l = 0; l < nsub; ++l) {
    PetscCall(PetscMalloc1(12, &subindices));
    k = 0;
    for (i = 0; i < 4; ++i) {
      for (j = jlow[l]; j < jhigh[l]; ++j) {
        subindices[k] = j * 4 + i;
        k++;
      }
    }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 12, subindices, PETSC_OWN_POINTER, rowis + l));
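    /* If exactly one of the index sets will be sorted, rows and columns need distinct IS objects, since ISSort would otherwise reorder both; else they can share one. */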
    if ((sort_rows && !sort_cols) || (!sort_rows && sort_cols)) {
      PetscCall(ISDuplicate(rowis[l], colis + l));
    } else {
      PetscCall(PetscObjectReference((PetscObject)rowis[l]));
      colis[l] = rowis[l];
    }
    if (sort_rows) PetscCall(ISSort(rowis[l]));
    if (sort_cols) PetscCall(ISSort(colis[l]));
  }

  PetscCall(MatCreateSubMatrices(A, nsub, rowis, colis, MAT_INITIAL_MATRIX, &S));
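  /* With MAT_INITIAL_MATRIX, MatCreateSubMatrices allocates the output array S of nsub sequential submatrices. */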

  show_inversions = PETSC_FALSE;

122:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-show_inversions", &show_inversions, NULL));

  inversions = 0;
  for (p = 0; p < size; ++p) {
    if (p == rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Number of subdomains: %" PetscInt_FMT ":\n", rank, size, nsub));
      for (l = 0; l < nsub; ++l) {
        PetscInt i0, i1;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Subdomain row IS %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(ISView(rowis[l], PETSC_VIEWER_STDOUT_SELF));
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Subdomain col IS %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(ISView(colis[l], PETSC_VIEWER_STDOUT_SELF));
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Submatrix %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(MatView(S[l], PETSC_VIEWER_STDOUT_SELF));
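        /* Scan each submatrix row for column-index inversions; this needs MatGetRow, which is unavailable for SBAIJ (see Results above). */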
        if (show_inversions) {
          PetscCall(MatGetOwnershipRange(S[l], &i0, &i1));
          for (i = i0; i < i1; ++i) {
            PetscCall(MatGetRow(S[l], i, &ncols, &cols, NULL));
            for (j = 1; j < ncols; ++j) {
              if (cols[j] < cols[j - 1]) {
                PetscCall(PetscPrintf(PETSC_COMM_SELF, "***Inversion in row %" PetscInt_FMT ": col[%" PetscInt_FMT "] = %" PetscInt_FMT " < %" PetscInt_FMT " = col[%" PetscInt_FMT "]\n", i, j, cols[j], cols[j - 1], j - 1));
                inversions++;
              }
            }
            PetscCall(MatRestoreRow(S[l], i, &ncols, &cols, NULL));
          }
        }
      }
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
  if (show_inversions) {
    PetscCallMPI(MPI_Reduce(&inversions, &total_inversions, 1, MPI_INT, MPI_SUM, 0, PETSC_COMM_WORLD));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "*Total inversions: %d\n", total_inversions));
  }
  PetscCall(MatDestroy(&A));

  for (l = 0; l < nsub; ++l) {
    PetscCall(ISDestroy(&(rowis[l])));
    PetscCall(ISDestroy(&(colis[l])));
  }
  PetscCall(MatDestroySubMatrices(nsub, &S));
  PetscCall(PetscFinalize());
  return 0;
}