/* Actual source code: ex9.c */
1: static char help[] = "Demonstrates use of VecCreateGhost().\n\n";
3: /*
4: Description: Ghost padding is one way to handle local calculations that
5: involve values from other processors. VecCreateGhost() provides
6: a way to create vectors with extra room at the end of the vector
7: array to contain the needed ghost values from other processors,
8: vector computations are otherwise unaffected.
9: */
11: /*
12: Include "petscvec.h" so that we can use vectors. Note that this file
13: automatically includes:
14: petscsys.h - base PETSc routines petscis.h - index sets
15: petscviewer.h - viewers
16: */
17: #include <petscvec.h>
19: int main(int argc, char **argv)
20: {
21: PetscMPIInt rank, size;
22: PetscInt nlocal = 6, nghost = 2, ifrom[2], i, rstart, rend;
23: PetscBool flg, flg2, flg3, flg4, flg5, setbefore = PETSC_FALSE;
24: PetscScalar value, *array, *tarray = 0;
25: Vec lx, gx, gxs;
26: IS ghost;
27: ISLocalToGlobalMapping mapping;
29: PetscFunctionBeginUser;
30: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
31: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
32: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
33: PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run example with two processors");
35: /*
36: Construct a two dimensional graph connecting nlocal degrees of
37: freedom per processor. From this we will generate the global
38: indices of needed ghost values
40: For simplicity we generate the entire graph on each processor:
41: in real application the graph would stored in parallel, but this
42: example is only to demonstrate the management of ghost padding
43: with VecCreateGhost().
45: In this example we consider the vector as representing
46: degrees of freedom in a one dimensional grid with periodic
47: boundary conditions.
49: ----Processor 1--------- ----Processor 2 --------
50: 0 1 2 3 4 5 6 7 8 9 10 11
51: |----|
52: |-------------------------------------------------|
54: */
56: if (rank == 0) {
57: ifrom[0] = 11;
58: ifrom[1] = 6;
59: } else {
60: ifrom[0] = 0;
61: ifrom[1] = 5;
62: }
64: /*
65: Create the vector with two slots for ghost points. Note that both
66: the local vector (lx) and the global vector (gx) share the same
67: array for storing vector values.
68: */
69: PetscCall(PetscOptionsHasName(NULL, NULL, "-allocate", &flg));
70: PetscCall(PetscOptionsHasName(NULL, NULL, "-vecmpisetghost", &flg2));
71: PetscCall(PetscOptionsHasName(NULL, NULL, "-minvalues", &flg3));
72: if (flg) {
73: PetscCall(PetscMalloc1(nlocal + nghost, &tarray));
74: PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, tarray, &gxs));
75: PetscCall(VecSetFromOptions(gxs));
76: } else if (flg2) {
77: PetscCall(VecCreate(PETSC_COMM_WORLD, &gxs));
78: PetscCall(PetscOptionsGetBool(NULL, NULL, "-setbefore", &setbefore, NULL)); // Set a vector type before VecMPISetGhost()
79: if (setbefore) PetscCall(VecSetFromOptions(gxs));
80: else PetscCall(VecSetType(gxs, VECMPI));
81: PetscCall(VecSetSizes(gxs, nlocal, PETSC_DECIDE));
82: PetscCall(VecMPISetGhost(gxs, nghost, ifrom));
83: if (!setbefore) PetscCall(VecSetFromOptions(gxs));
84: } else {
85: PetscCall(VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, &gxs));
86: PetscCall(VecSetFromOptions(gxs));
87: PetscCall(VecSet(gxs, 1.0));
88: if (rank == 1) PetscCall(VecSetValueLocal(gxs, 0, 2.0, INSERT_VALUES));
89: PetscCall(VecAssemblyBegin(gxs));
90: PetscCall(VecAssemblyEnd(gxs));
91: value = 0.0;
92: if (rank == 1) {
93: PetscCall(VecGetArray(gxs, &array));
94: value = array[0];
95: PetscCall(VecRestoreArray(gxs, &array));
96: }
97: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &value, 1, MPIU_SCALAR, MPIU_SUM, PETSC_COMM_WORLD));
98: PetscCheck(PetscIsCloseAtTolScalar(value, 2.0, PETSC_SMALL, PETSC_SMALL), PETSC_COMM_WORLD, PETSC_ERR_PLIB, "%g != 2.0", (double)PetscAbsScalar(value));
99: }
101: /*
102: Test VecDuplicate()
103: */
104: PetscCall(VecDuplicate(gxs, &gx));
105: PetscCall(VecDestroy(&gxs));
107: /*
108: Access the local representation
109: */
110: PetscCall(VecGhostGetLocalForm(gx, &lx));
112: /*
113: Set the values from 0 to 12 into the "global" vector
114: */
115: PetscCall(VecGetOwnershipRange(gx, &rstart, &rend));
116: for (i = rstart; i < rend; i++) {
117: value = (PetscScalar)i;
118: PetscCall(VecSetValues(gx, 1, &i, &value, INSERT_VALUES));
119: }
120: PetscCall(VecAssemblyBegin(gx));
121: PetscCall(VecAssemblyEnd(gx));
123: PetscCall(VecGhostUpdateBegin(gx, INSERT_VALUES, SCATTER_FORWARD));
124: PetscCall(VecGhostUpdateEnd(gx, INSERT_VALUES, SCATTER_FORWARD));
126: /*
127: Print out each vector, including the ghost padding region.
128: */
129: PetscCall(VecGetArray(lx, &array));
130: for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
131: PetscCall(VecRestoreArray(lx, &array));
132: PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
133: PetscCall(VecGhostRestoreLocalForm(gx, &lx));
135: /* Another test that sets ghost values and then accumulates onto the owning processors using MIN_VALUES */
136: if (flg3) {
137: if (rank == 0) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nTesting VecGhostUpdate with MIN_VALUES\n"));
138: PetscCall(VecGhostGetLocalForm(gx, &lx));
139: PetscCall(VecGetArray(lx, &array));
140: for (i = 0; i < nghost; i++) array[nlocal + i] = rank ? (PetscScalar)4 : (PetscScalar)8;
141: PetscCall(VecRestoreArray(lx, &array));
142: PetscCall(VecGhostRestoreLocalForm(gx, &lx));
144: PetscCall(VecGhostUpdateBegin(gx, MIN_VALUES, SCATTER_REVERSE));
145: PetscCall(VecGhostUpdateEnd(gx, MIN_VALUES, SCATTER_REVERSE));
147: PetscCall(VecGhostGetLocalForm(gx, &lx));
148: PetscCall(VecGetArray(lx, &array));
150: for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
151: PetscCall(VecRestoreArray(lx, &array));
152: PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
153: PetscCall(VecGhostRestoreLocalForm(gx, &lx));
154: }
156: PetscCall(PetscOptionsHasName(NULL, NULL, "-vecghostgetghostis", &flg4));
157: if (flg4) {
158: PetscCall(VecGhostGetGhostIS(gx, &ghost));
159: PetscCall(ISView(ghost, PETSC_VIEWER_STDOUT_WORLD));
160: }
161: PetscCall(PetscOptionsHasName(NULL, NULL, "-getgtlmapping", &flg5));
162: if (flg5) {
163: PetscCall(VecGetLocalToGlobalMapping(gx, &mapping));
164: if (rank == 0) PetscCall(ISLocalToGlobalMappingView(mapping, NULL));
165: }
167: PetscCall(VecDestroy(&gx));
169: if (flg) PetscCall(PetscFree(tarray));
170: PetscCall(PetscFinalize());
171: return 0;
172: }
/*TEST

   testset:
     requires: cuda kokkos_kernels
     args: -vec_type {{mpi cuda kokkos}}

     test:
       nsize: 2

     test:
       suffix: 2
       nsize: 2
       args: -allocate
       output_file: output/ex9_1.out

     test:
       suffix: 3
       nsize: 2
       args: -vecmpisetghost -setbefore {{true false}}
       output_file: output/ex9_1.out

     test:
       suffix: 4
       nsize: 2
       args: -minvalues
       output_file: output/ex9_2.out
       requires: !complex

     test:
       suffix: 5
       nsize: 2
       args: -vecghostgetghostis

     test:
       suffix: 6
       nsize: 2
       args: -getgtlmapping

TEST*/