Actual source code: ex106.c

static char help[] = "Test repeated LU factorizations. Used for checking memory leak\n\
  -m <size> : problem size\n\
  -mat_nonsym : use nonsymmetric matrix (default is symmetric)\n\n";
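
/* Example invocation (illustrative; the binary name and process count are
   assumptions, the options come from the help string above):
     mpiexec -n 2 ./ex106 -m 5 -mat_nonsym */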

#include <petscmat.h>
int main(int argc, char **args)
{
  Mat           C, F;    /* matrix */
  Vec           x, u, b; /* approx solution, RHS, exact solution */
  PetscReal     norm;    /* norm of solution error */
  PetscScalar   v, none = -1.0;
  PetscInt      Ii, J, ldim, low, high, iglobal, Istart, Iend;
  PetscInt      i, j, m = 3, n = 2, its;
  PetscMPIInt   size, rank;
  PetscBool     mat_nonsymmetric;
  PetscInt      its_max;
  MatFactorInfo factinfo;
  IS            perm, iperm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
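  /* Scale the grid's second dimension with the number of processes,
     so the global problem size is m*n = 2*m*size */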
  n = 2 * size;

  /*
     Set flag if we are doing a nonsymmetric problem; the default is symmetric.
  */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mat_nonsym", &mat_nonsymmetric));

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatGetOwnershipRange(C, &Istart, &Iend));

  /*
     Set matrix entries in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
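  /* The loop below assembles the standard 5-point stencil on an m x n grid:
     4 on the diagonal and -1 for each existing north/south/east/west neighbor */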
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(C, 1, &Ii, 1, &Ii, &v, ADD_VALUES));
  }

  /*
     Make the matrix nonsymmetric if desired
  */
  if (mat_nonsymmetric) {
    for (Ii = Istart; Ii < Iend; Ii++) {
      v = -1.5;
      i = Ii / n;
      if (i > 1) {
        J = Ii - n - 1;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
    }
  } else {
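    /* Hint to PETSc that C is symmetric and that the symmetry is permanent
       (it survives later assemblies), so symmetric-aware code paths may be used */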
    PetscCall(MatSetOption(C, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(C, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  its_max = 1000;
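  /* Note: its_max is reassigned below, just before the factorization loop */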
  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /*
     Currently, all parallel PETSc vectors are partitioned by
     contiguous chunks across the processors.  Determine which
     range of entries is locally owned.
  */
  PetscCall(VecGetOwnershipRange(x, &low, &high));

  /*
     Set elements within the exact solution vector in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local entries will be sent to the
        appropriate processor during vector assembly).
      - Always specify global locations of vector entries.
  */
  PetscCall(VecGetLocalSize(x, &ldim));
  for (i = 0; i < ldim; i++) {
    iglobal = i + low;
    v       = (PetscScalar)(i + 100 * rank);
    PetscCall(VecSetValues(u, 1, &iglobal, &v, INSERT_VALUES));
  }

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition,
     by placing code between these two statements.
  */
  PetscCall(VecAssemblyBegin(u));
  PetscCall(VecAssemblyEnd(u));

  /* Compute the right-hand-side vector b = C*u, so that u is the exact solution */
  PetscCall(MatMult(C, u, b));

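  /* Compute row/column permutations once, for reuse by every symbolic
     factorization below; MATORDERINGNATURAL keeps the original ordering */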
  PetscCall(MatGetOrdering(C, MATORDERINGNATURAL, &perm, &iperm));
  PetscCall(MatFactorInfoInitialize(&factinfo)); /* initialize factorization options to defaults */
  its_max = 2000;
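  /* Repeatedly create, use, and destroy an LU factorization of C. Each pass
     performs the symbolic factorization (sparsity analysis), one numeric
     factorization (the inner single-iteration loop), and a solve; destroying
     F every time should return all memory, which is what this test checks */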
  for (i = 0; i < its_max; i++) {
    PetscCall(MatGetFactor(C, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
    PetscCall(MatLUFactorSymbolic(F, C, perm, iperm, &factinfo));
    for (j = 0; j < 1; j++) PetscCall(MatLUFactorNumeric(F, C, &factinfo));
    PetscCall(MatSolve(F, b, x));
    PetscCall(MatDestroy(&F));
  }
  PetscCall(ISDestroy(&perm));
  PetscCall(ISDestroy(&iperm));

  /* Check the error: x should match the exact solution u */
  PetscCall(VecAXPY(x, none, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g\n", (double)norm));

  /* Free work space. */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}