Actual source code: ex80.c
static char help[] = "Partition a tiny grid.\n\n";
/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A, At;
  PetscMPIInt     rank, size;
  PetscInt       *ia, *ja, row;
  MatPartitioning part;
  IS              is, isn;
  PetscBool       equal;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
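  /*
     The "tiny grid" is a 4 x 4 mesh of 16 vertices, numbered row by row:

        0  1  2  3
        4  5  6  7
        8  9 10 11
       12 13 14 15

     Each rank owns 4 consecutive vertices and stores their neighbors in
     CSR (compressed sparse row) form: the neighbors of local row i are
     ja[ia[i]] .. ja[ia[i+1]-1]. On rank 0, for example, vertex 0 has
     neighbors {1, 4}, so ia[0] = 0, ia[1] = 2, ja[0] = 1 and ja[1] = 4.
  */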
  PetscCall(PetscMalloc1(5, &ia));
  PetscCall(PetscMalloc1(16, &ja));
  if (rank == 0) { /* vertices 0-3 */
    ja[0] = 1; ja[1] = 4;            /* neighbors of vertex 0 */
    ja[2] = 0; ja[3] = 2; ja[4] = 5; /* neighbors of vertex 1 */
    ja[5] = 1; ja[6] = 3; ja[7] = 6; /* neighbors of vertex 2 */
    ja[8] = 2; ja[9] = 7;            /* neighbors of vertex 3 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) { /* vertices 4-7 */
    ja[0] = 0; ja[1] = 5; ja[2] = 8;              /* neighbors of vertex 4 */
    ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9;   /* neighbors of vertex 5 */
    ja[7] = 2; ja[8] = 5; ja[9] = 7; ja[10] = 10; /* neighbors of vertex 6 */
    ja[11] = 3; ja[12] = 6; ja[13] = 11;          /* neighbors of vertex 7 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) { /* vertices 8-11 */
    ja[0] = 4; ja[1] = 9; ja[2] = 12;              /* neighbors of vertex 8 */
    ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13;  /* neighbors of vertex 9 */
    ja[7] = 6; ja[8] = 9; ja[9] = 11; ja[10] = 14; /* neighbors of vertex 10 */
    ja[11] = 7; ja[12] = 10; ja[13] = 15;          /* neighbors of vertex 11 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else { /* rank 3, vertices 12-15 */
    ja[0] = 8; ja[1] = 13;              /* neighbors of vertex 12 */
    ja[2] = 9; ja[3] = 12; ja[4] = 14;  /* neighbors of vertex 13 */
    ja[5] = 10; ja[6] = 13; ja[7] = 15; /* neighbors of vertex 14 */
    ja[8] = 11; ja[9] = 14;             /* neighbors of vertex 15 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }
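  /*
     MatCreateMPIAdj() does not copy ia and ja: the matrix takes ownership
     of both arrays and frees them when A is destroyed, which is why they
     were allocated with PetscMalloc1() and are never freed explicitly below.
  */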
  PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, 4, 16, ia, ja, NULL, &A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));

  /* Create the same matrix but using MatSetValues() */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &At));
  PetscCall(MatSetSizes(At, 4, 4, 16, 16));
  PetscCall(MatSetType(At, MATMPIADJ));
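  /*
     Insert the same adjacency lists row by row. Rows are addressed by
     global index (this rank owns rows 4*rank .. 4*rank+3), and the values
     argument is NULL because an adjacency matrix stores only the nonzero
     structure of the graph, not numerical entries.
  */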
  for (PetscInt i = 0; i < 4; i++) {
    row = i + 4 * rank;
    PetscCall(MatSetValues(At, 1, &row, ia[i + 1] - ia[i], ja + ia[i], NULL, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(At, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(At, MAT_FINAL_ASSEMBLY));
  PetscCall(MatEqual(A, At, &equal));
  PetscCheck(equal, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Matrices that should be equal are not");
  PetscCall(MatDestroy(&At));
  /*
     Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetFromOptions(part));
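  /* The partitioner is selected at run time, e.g. -mat_partitioning_type parmetis */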
  /* Compute the new owning process of each vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* Convert the partitioning into a new global numbering of the vertices */
  PetscCall(ISPartitioningToNumbering(is, &isn));
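  /*
     isn gives, for each locally owned vertex, its new global number; the
     new numbering is contiguous on each target process: vertices assigned
     to rank 0 come first, then those assigned to rank 1, and so on.
  */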
  PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&A));

  PetscCall(PetscFinalize());
  return 0;
}
/*
  test:
    requires: parmetis
    args: -mat_view
    nsize: 4
*/
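/*
  A typical run, assuming PETSc was configured with ParMETIS (the
  executable name below is illustrative):

    mpiexec -n 4 ./ex80 -mat_partitioning_type parmetis -mat_view
*/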