Actual source code: ex9.c
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";

/*
  Description: Ghost padding is one way to handle local calculations that
  involve values from other processors. VecCreateGhost() provides a way to
  create vectors with extra room at the end of the vector array to contain
  the needed ghost values from other processors; vector computations are
  otherwise unaffected.
*/
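
/*
  Layout sketch of the local (ghosted) form of such a vector: the nlocal
  owned entries come first, followed by the nghost ghost slots, so index i
  in [0, nlocal) is an owned value and index nlocal + j holds a copy of the
  remote value whose global index is ifrom[j] (see the print loops below):

      [ owned values 0 .. nlocal-1 | ghosts ifrom[0] .. ifrom[nghost-1] ]
*/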

/*
  Include "petscvec.h" so that we can use vectors. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscMPIInt rank, size;
  PetscInt    nlocal = 6, nghost = 2, ifrom[2], i, rstart, rend;
  PetscBool   flg, flg2, flg3;
  PetscScalar value, *array, *tarray = NULL;
  Vec         lx, gx, gxs;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run example with two processors");

  /*
     Construct a graph connecting the nlocal degrees of freedom per
     processor. From this we generate the global indices of the needed
     ghost values.

     For simplicity we generate the entire graph on each processor;
     in a real application the graph would be stored in parallel, but this
     example only demonstrates the management of ghost padding with
     VecCreateGhost().

     In this example we consider the vector as representing degrees of
     freedom in a one dimensional grid with periodic boundary conditions.

        ----Processor 1---------  ----Processor 2 --------
         0    1   2   3   4    5    6    7   8   9   10   11
                               |----|
         |-------------------------------------------------|
  */

  if (rank == 0) {
    ifrom[0] = 11;
    ifrom[1] = 6;
  } else {
    ifrom[0] = 0;
    ifrom[1] = 5;
  }
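
  /*
     ifrom[] holds the global indices of this rank's ghost entries: rank 0
     (owning entries 0-5) needs 11 and 6, while rank 1 (owning 6-11) needs
     0 and 5, matching the periodic connections drawn above.
  */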

  /*
     Create the vector with two slots for ghost points. Note that both
     the local vector (lx) and the global vector (gx) share the same
     array for storing vector values.
  */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-allocate", &flg));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-vecmpisetghost", &flg2));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-minvalues", &flg3));
  if (flg) {
    PetscCall(PetscMalloc1(nlocal + nghost, &tarray));
    PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, tarray, &gxs));
  } else if (flg2) {
    PetscCall(VecCreate(PETSC_COMM_WORLD, &gxs));
    PetscCall(VecSetType(gxs, VECMPI));
    PetscCall(VecSetSizes(gxs, nlocal, PETSC_DECIDE));
    PetscCall(VecMPISetGhost(gxs, nghost, ifrom));
  } else {
    PetscCall(VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, &gxs));
  }
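
  /*
     All three branches above build the same ghosted parallel (VECMPI)
     vector: -allocate exercises user-provided storage, -vecmpisetghost
     exercises the set-type-then-set-ghosts path, and the default is the
     one-call VecCreateGhost().
  */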

  /*
     Test VecDuplicate()
  */
  PetscCall(VecDuplicate(gxs, &gx));
  PetscCall(VecDestroy(&gxs));

  /*
     Access the local representation
  */
  PetscCall(VecGhostGetLocalForm(gx, &lx));
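
  /*
     lx is a sequential vector of length nlocal + nghost that shares its
     storage with gx, so writing through either one is visible in the other.
  */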

  /*
     Set the values 0 through 11 into the "global" vector
  */
  PetscCall(VecGetOwnershipRange(gx, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    value = (PetscScalar)i;
    PetscCall(VecSetValues(gx, 1, &i, &value, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(gx));
  PetscCall(VecAssemblyEnd(gx));

  PetscCall(VecGhostUpdateBegin(gx, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(gx, INSERT_VALUES, SCATTER_FORWARD));
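
  /*
     SCATTER_FORWARD with INSERT_VALUES copies each owned value into the
     ghost slots of the ranks that listed it in ifrom[], so the padding
     region of lx now mirrors the corresponding remote entries.
  */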

  /*
     Print out each vector, including the ghost padding region.
  */
  PetscCall(VecGetArray(lx, &array));
  for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
  PetscCall(VecRestoreArray(lx, &array));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
  PetscCall(VecGhostRestoreLocalForm(gx, &lx));

  /* Another test: set the ghost values directly, then reduce them onto the owning processes using MIN_VALUES */
  if (flg3) {
    if (rank == 0) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nTesting VecGhostUpdate with MIN_VALUES\n"));
    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));
    for (i = 0; i < nghost; i++) array[nlocal + i] = rank ? (PetscScalar)4 : (PetscScalar)8;
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));

    PetscCall(VecGhostUpdateBegin(gx, MIN_VALUES, SCATTER_REVERSE));
    PetscCall(VecGhostUpdateEnd(gx, MIN_VALUES, SCATTER_REVERSE));
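
    /*
       SCATTER_REVERSE sends each ghost value back to its owning rank;
       MIN_VALUES keeps the smaller of the owner's current entry and the
       incoming ghost contribution.
    */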

    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));

    for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));
  }

  PetscCall(VecDestroy(&gx));

  if (flg) PetscCall(PetscFree(tarray));
  PetscCall(PetscFinalize());
  return 0;
}
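
/*
   A typical invocation (a sketch; the MPI launcher name and binary path
   may differ on your system):

       mpiexec -n 2 ./ex9 -minvalues
*/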

/*TEST

   test:
      nsize: 2

   test:
      suffix: 2
      nsize: 2
      args: -allocate
      output_file: output/ex9_1.out

   test:
      suffix: 3
      nsize: 2
      args: -vecmpisetghost
      output_file: output/ex9_1.out

   test:
      suffix: 4
      nsize: 2
      args: -minvalues
      output_file: output/ex9_2.out
      requires: !complex

TEST*/