Actual source code: ex110.c

static char help[] = "Testing MatCreateMPIAIJWithSplitArrays().\n\n";

#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat             A, B;
  PetscInt        i, j, column, *ooj;
  PetscInt       *di, *dj, *oi, *oj, nd;
  const PetscInt *garray;
  PetscScalar    *oa, *da;
  PetscScalar     value;
  PetscRandom     rctx;
  PetscBool       equal, done;
  Mat             AA, AB;
  PetscMPIInt     size, rank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size > 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 2 or more processes");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Create an MPIAIJ matrix for checking */
  PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, 5, 5, PETSC_DECIDE, PETSC_DECIDE, 0, NULL, 0, NULL, &A));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(MatSetUp(A));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
  PetscCall(PetscRandomSetFromOptions(rctx));

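  /* Insert random values at random global columns into the 5 locally owned rows */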
  for (i = 5 * rank; i < 5 * rank + 5; i++) {
    for (j = 0; j < 5 * size; j++) {
      PetscCall(PetscRandomGetValue(rctx, &value));
      column = (PetscInt)(5 * size * PetscRealPart(value));
      PetscCall(PetscRandomGetValue(rctx, &value));
      PetscCall(MatSetValues(A, 1, &i, 1, &column, &value, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

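  /* Access the diagonal (AA) and off-diagonal (AB) SeqAIJ blocks of A and the global column map of AB (garray),
     then borrow their CSR row/column index arrays and numerical values */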
  PetscCall(MatMPIAIJGetSeqAIJ(A, &AA, &AB, &garray));
  PetscCall(MatGetRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&di, (const PetscInt **)&dj, &done));
  PetscCall(MatSeqAIJGetArray(AA, &da));
  PetscCall(MatGetRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&oi, (const PetscInt **)&oj, &done));
  PetscCall(MatSeqAIJGetArray(AB, &oa));

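  /* oi[5] is the total number of nonzeros in the 5 local rows of the off-diagonal block;
     copy its column indices so they can be renumbered without touching A's internal storage */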
  PetscCall(PetscMalloc1(oi[5], &ooj));
  PetscCall(PetscArraycpy(ooj, oj, oi[5]));
  /* Convert the column indices of the off-diagonal portion back to global numbering */
  for (i = 0; i < oi[5]; i++) ooj[i] = garray[ooj[i]];

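  /* Rebuild the same parallel matrix directly from the split diagonal/off-diagonal CSR arrays and compare with A */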
  PetscCall(MatCreateMPIAIJWithSplitArrays(PETSC_COMM_WORLD, 5, 5, PETSC_DETERMINE, PETSC_DETERMINE, di, dj, da, oi, ooj, oa, &B));
  PetscCall(MatSetUp(B));
  PetscCall(MatEqual(A, B, &equal));

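  /* Return the borrowed index and value arrays to the SeqAIJ blocks */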
  PetscCall(MatRestoreRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&di, (const PetscInt **)&dj, &done));
  PetscCall(MatSeqAIJRestoreArray(AA, &da));
  PetscCall(MatRestoreRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&oi, (const PetscInt **)&oj, &done));
  PetscCall(MatSeqAIJRestoreArray(AB, &oa));

  PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Likely a bug in MatCreateMPIAIJWithSplitArrays()");

  /* Free work space and objects */
  PetscCall(PetscFree(ooj));
  PetscCall(PetscRandomDestroy(&rctx));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3

TEST*/