Actual source code: ex41.c
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
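
/*
   Example invocation (illustrative only; the matrix file, process count, and
   option values are taken from the test block at the bottom of this file and
   may need to be adapted to the local PETSc installation):

     mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
*/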

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt    nd = 2,ov = 1,i,j,m,n,*idx,lsize;
  PetscMPIInt rank;
  PetscBool   flg;
  Mat         A,B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS          *is1,*is2;
  PetscRandom r;
  PetscScalar rand;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL);
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the matrix again as a seq matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);

  /* Get the global matrix size and create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);

  /* Create the IS corresponding to subdomains */
  PetscMalloc1(nd,&is1);
  PetscMalloc1(nd,&is2);
  PetscMalloc1(m,&idx);

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
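    /* Advance the generator rank times so each process draws a different subdomain size */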
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      PetscRandomGetValue(r,&rand);
      idx[j] = (PetscInt)(rand*m);
    }
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);
  }
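
  /* Grow each subdomain by ov layers of overlap; the parallel (A) and sequential (B) results should match */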
  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);

  /* Now see if the serial and parallel cases give the same answer */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ISEqual(is1[i],is2[i],&flg);
    ISGetSize(is1[i],&sz1);
    ISGetSize(is2[i],&sz2);
    /* Report any mismatch between the two index sets */
    if (!flg) PetscPrintf(PETSC_COMM_SELF,"[%d] i=%d: serial and parallel index sets differ (sizes %d and %d)\n",rank,(int)i,(int)sz1,(int)sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscRandomDestroy(&r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFree(idx);
  PetscFinalize();
  return 0;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/