Actual source code: ex204.c

static char help[] = "Test ViennaCL Matrix Conversions";

#include <petscmat.h>

int main(int argc, char **args)
{
  PetscMPIInt rank, size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* Construct a sequential ViennaCL matrix and vector */
  if (rank == 0) {
    Mat               A_vcl;
    Vec               v_vcl, r_vcl;
    PetscInt          n = 17, m = 31, nz = 5, i, cnt, j;
    const PetscScalar val = 1.0;

    PetscCall(MatCreateSeqAIJViennaCL(PETSC_COMM_SELF, m, n, nz, NULL, &A_vcl));

    /* Add nz arbitrary entries per row in arbitrary columns */
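    /* (19 is coprime to n = 17, so the nz column indices generated below are distinct within each row) */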
    for (i = 0; i < m; ++i) {
      for (cnt = 0; cnt < nz; ++cnt) {
        j = (19 * cnt + (7 * i + 3)) % n;
        PetscCall(MatSetValue(A_vcl, i, j, (PetscScalar)(0.3 * i + j), INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A_vcl, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A_vcl, MAT_FINAL_ASSEMBLY));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, n, &v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, m, &r_vcl));
    PetscCall(VecSet(v_vcl, val));

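    /* Apply the ViennaCL matrix to the ViennaCL vector: r_vcl = A_vcl * v_vcl */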
    PetscCall(MatMult(A_vcl, v_vcl, r_vcl));

    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A, A_vcl;
    Vec               v, r, v_vcl, r_vcl, d_vcl;
    PetscInt          n = 17, m = 31, nz = 5, i, cnt, j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, m, n, nz, NULL, &A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i = 0; i < m; ++i) {
      for (cnt = 0; cnt < nz; ++cnt) {
        j = (19 * cnt + (7 * i + 3)) % n;
        PetscCall(MatSetValue(A, i, j, (PetscScalar)(0.3 * i + j), INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A, "Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &r));
    PetscCall(PetscObjectSetName((PetscObject)r, "CPU result vector"));
    PetscCall(VecSet(v, val));
    PetscCall(MatMult(A, v, r));

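    /* MAT_INITIAL_MATRIX creates a new ViennaCL copy of A; A itself is left unchanged */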
    PetscCall(MatConvert(A, MATSEQAIJVIENNACL, MAT_INITIAL_MATRIX, &A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl, "New ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, n, &v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, m, &r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl, "ViennaCL result vector"));
    PetscCall(VecSet(v_vcl, val));
    PetscCall(MatMult(A_vcl, v_vcl, r_vcl));

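    /* Compare the CPU result r with the ViennaCL result r_vcl; d_vcl = r - r_vcl should be (near) zero */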
    PetscCall(VecDuplicate(r_vcl, &d_vcl));
    PetscCall(VecCopy(r, d_vcl));
    PetscCall(VecAXPY(d_vcl, -1.0, r_vcl));
    PetscCall(VecNorm(d_vcl, NORM_INFINITY, &dnorm));
    PetscCheck(dnorm <= tol, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Sequential CPU and ViennaCL vector results incompatible with inf norm greater than tolerance of %g", (double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a sequential AIJ matrix on rank 0, convert it in place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (rank == 0) {
    Mat               A;
    Vec               v, r, v_vcl, r_vcl, d_vcl;
    PetscInt          n = 17, m = 31, nz = 5, i, cnt, j;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, m, n, nz, NULL, &A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    for (i = 0; i < m; ++i) {
      for (cnt = 0; cnt < nz; ++cnt) {
        j = (19 * cnt + (7 * i + 3)) % n;
        PetscCall(MatSetValue(A, i, j, (PetscScalar)(0.3 * i + j), INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A, "Sequential CPU Matrix"));

    PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &v));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &r));
    PetscCall(PetscObjectSetName((PetscObject)r, "CPU result vector"));
    PetscCall(VecSet(v, val));
    PetscCall(MatMult(A, v, r));

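    /* MAT_INPLACE_MATRIX converts A itself; afterwards A has type MATSEQAIJVIENNACL */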
    PetscCall(MatConvert(A, MATSEQAIJVIENNACL, MAT_INPLACE_MATRIX, &A));
    PetscCall(PetscObjectSetName((PetscObject)A, "Converted ViennaCL Matrix"));

    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, n, &v_vcl));
    PetscCall(VecCreateSeqViennaCL(PETSC_COMM_SELF, m, &r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl, "ViennaCL result vector"));
    PetscCall(VecSet(v_vcl, val));
    PetscCall(MatMult(A, v_vcl, r_vcl));

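    /* Compare the CPU result r with the ViennaCL result r_vcl, as above */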
    PetscCall(VecDuplicate(r_vcl, &d_vcl));
    PetscCall(VecCopy(r, d_vcl));
    PetscCall(VecAXPY(d_vcl, -1.0, r_vcl));
    PetscCall(VecNorm(d_vcl, NORM_INFINITY, &dnorm));
    PetscCheck(dnorm <= tol, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Sequential CPU and ViennaCL vector results incompatible with inf norm greater than tolerance of %g", (double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  /* Create a parallel AIJ matrix, convert it to a new ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A, A_vcl;
    Vec               v, r, v_vcl, r_vcl, d_vcl;
    PetscInt          N = 17, M = 31, nz = 5, i, cnt, j, rlow, rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DETERMINE, PETSC_DETERMINE, M, N, nz, NULL, nz, NULL, &A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A, &rlow, &rhigh));
    for (i = rlow; i < rhigh; ++i) {
      for (cnt = 0; cnt < nz; ++cnt) {
        j = (19 * cnt + (7 * i + 3)) % N;
        PetscCall(MatSetValue(A, i, j, (PetscScalar)(0.3 * i + j), INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A, "MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A, &v, &r));
    PetscCall(PetscObjectSetName((PetscObject)r, "MPI CPU result vector"));
    PetscCall(VecSet(v, val));
    PetscCall(MatMult(A, v, r));

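    /* MAT_INITIAL_MATRIX creates a new MPI ViennaCL matrix; its local diagonal and off-diagonal blocks are held in ViennaCL format */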
    PetscCall(MatConvert(A, MATMPIAIJVIENNACL, MAT_INITIAL_MATRIX, &A_vcl));
    PetscCall(PetscObjectSetName((PetscObject)A_vcl, "New MPI ViennaCL Matrix"));

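    /* MatCreateVecs returns vectors of the matrix's preferred type, so these are ViennaCL vectors */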
    PetscCall(MatCreateVecs(A_vcl, &v_vcl, &r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl, "ViennaCL result vector"));
    PetscCall(VecSet(v_vcl, val));
    PetscCall(MatMult(A_vcl, v_vcl, r_vcl));

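    /* Compare the CPU result r with the ViennaCL result r_vcl */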
    PetscCall(VecDuplicate(r_vcl, &d_vcl));
    PetscCall(VecCopy(r, d_vcl));
    PetscCall(VecAXPY(d_vcl, -1.0, r_vcl));
    PetscCall(VecNorm(d_vcl, NORM_INFINITY, &dnorm));
    PetscCheck(dnorm <= tol, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "MPI CPU and MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g", (double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
    PetscCall(MatDestroy(&A_vcl));
  }

  /* Create a parallel AIJ matrix, convert it in place to a ViennaCL matrix, and apply it to a ViennaCL vector */
  if (size > 1) {
    Mat               A;
    Vec               v, r, v_vcl, r_vcl, d_vcl;
    PetscInt          N = 17, M = 31, nz = 5, i, cnt, j, rlow, rhigh;
    const PetscScalar val = 1.0;
    PetscReal         dnorm;
    const PetscReal   tol = 1e-5;

    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DETERMINE, PETSC_DETERMINE, M, N, nz, NULL, nz, NULL, &A));

    /* Add nz arbitrary entries per row in arbitrary columns */
    PetscCall(MatGetOwnershipRange(A, &rlow, &rhigh));
    for (i = rlow; i < rhigh; ++i) {
      for (cnt = 0; cnt < nz; ++cnt) {
        j = (19 * cnt + (7 * i + 3)) % N;
        PetscCall(MatSetValue(A, i, j, (PetscScalar)(0.3 * i + j), INSERT_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(PetscObjectSetName((PetscObject)A, "MPI CPU Matrix"));

    PetscCall(MatCreateVecs(A, &v, &r));
    PetscCall(PetscObjectSetName((PetscObject)r, "MPI CPU result vector"));
    PetscCall(VecSet(v, val));
    PetscCall(MatMult(A, v, r));

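    /* Convert A in place; afterwards A has type MATMPIAIJVIENNACL */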
    PetscCall(MatConvert(A, MATMPIAIJVIENNACL, MAT_INPLACE_MATRIX, &A));
    PetscCall(PetscObjectSetName((PetscObject)A, "Converted MPI ViennaCL Matrix"));

    PetscCall(MatCreateVecs(A, &v_vcl, &r_vcl));
    PetscCall(PetscObjectSetName((PetscObject)r_vcl, "ViennaCL result vector"));
    PetscCall(VecSet(v_vcl, val));
    PetscCall(MatMult(A, v_vcl, r_vcl));

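    /* Compare the CPU result r with the ViennaCL result r_vcl */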
    PetscCall(VecDuplicate(r_vcl, &d_vcl));
    PetscCall(VecCopy(r, d_vcl));
    PetscCall(VecAXPY(d_vcl, -1.0, r_vcl));
    PetscCall(VecNorm(d_vcl, NORM_INFINITY, &dnorm));
    PetscCheck(dnorm <= tol, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "MPI CPU and MPI ViennaCL vector results incompatible with inf norm greater than tolerance of %g", (double)tol);

    PetscCall(VecDestroy(&v));
    PetscCall(VecDestroy(&r));
    PetscCall(VecDestroy(&v_vcl));
    PetscCall(VecDestroy(&r_vcl));
    PetscCall(VecDestroy(&d_vcl));
    PetscCall(MatDestroy(&A));
  }

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: viennacl

   test:
      output_file: output/ex204.out

   test:
      suffix: 2
      nsize: 2
      output_file: output/ex204.out

TEST*/