Actual source code: ex9.c

  1: static char help[] = "This example shows 1) how to transfer vectors from a parent communicator to vectors on a child communicator and vice versa;\n\
  2:   2) how to transfer vectors from a subcommunicator to vectors on another subcommunicator. The two subcommunicators are not\n\
  3:   required to cover all processes in PETSC_COMM_WORLD; 3) how to copy a vector from a parent communicator to vectors on its child communicators.\n\
  4:   To run any example with VECCUDA vectors, add -vectype cuda to the argument list\n\n";
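     /* Example invocations (a sketch, assuming the executable is built as ./ex9; the TEST block at the bottom runs with 7 ranks):
            mpiexec -n 7 ./ex9 -world2sub
            mpiexec -n 7 ./ex9 -sub2sub
            mpiexec -n 7 ./ex9 -world2subs -vectype cuda
     */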

  6: #include <petscvec.h>
  7: int main(int argc, char **argv)
  8: {
  9:   PetscMPIInt nproc, grank, mycolor;
 10:   PetscInt    i, n, N = 20, low, high;
 11:   MPI_Comm    subcomm;
 12:   Vec         x  = NULL; /* global vectors on PETSC_COMM_WORLD */
 13:   Vec         yg = NULL; /* global vectors on PETSC_COMM_WORLD */
 14:   VecScatter  vscat;
 15:   IS          ix, iy;
 16:   PetscBool   iscuda = PETSC_FALSE; /* Option to use VECCUDA vectors */
 17:   PetscBool   optionflag, compareflag;
 18:   char        vectypename[PETSC_MAX_PATH_LEN];
 19:   PetscBool   world2sub  = PETSC_FALSE; /* Copy a vector from WORLD to a subcomm? */
 20:   PetscBool   sub2sub    = PETSC_FALSE; /* Copy a vector from a subcomm to another subcomm? */
 21:   PetscBool   world2subs = PETSC_FALSE; /* Copy a vector from WORLD to multiple subcomms? */

 23:   PetscFunctionBeginUser;
 24:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
 25:   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
 26:   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

 28:   PetscCheck(nproc >= 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "This test must have at least two processes to run");

 30:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-world2sub", &world2sub, NULL));
 31:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-sub2sub", &sub2sub, NULL));
 32:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-world2subs", &world2subs, NULL));
 33:   PetscCall(PetscOptionsGetString(NULL, NULL, "-vectype", vectypename, sizeof(vectypename), &optionflag));
 34:   if (optionflag) {
 35:     PetscCall(PetscStrncmp(vectypename, "cuda", (size_t)4, &compareflag));
 36:     if (compareflag) iscuda = PETSC_TRUE;
 37:   }

 39:   /* Split PETSC_COMM_WORLD into three subcomms. Each process can only see the subcomm it belongs to */
 40:   mycolor = grank % 3;
 41:   PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &subcomm));
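       /* With, e.g., 7 ranks in PETSC_COMM_WORLD this gives subcomm0 = {0, 3, 6}, subcomm1 = {1, 4} and subcomm2 = {2, 5};
          within each subcomm the ranks are renumbered 0, 1, ... in the same relative order, since grank is used as the key */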

 43:   /*===========================================================================
 44:    *  Transfer a vector x defined on PETSC_COMM_WORLD to a vector y defined on
 45:    *  a subcommunicator of PETSC_COMM_WORLD and vice versa.
 46:    *===========================================================================*/
 47:   if (world2sub) {
 48:     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
 49:     PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
 50:     if (iscuda) PetscCall(VecSetType(x, VECCUDA));
 51:     else PetscCall(VecSetType(x, VECSTANDARD));
 52:     PetscCall(VecSetUp(x));
 53:     PetscCall(PetscObjectSetName((PetscObject)x, "x_commworld")); /* Give a name to view x clearly */

 55:     /* Initialize x to [-0.0, -1.0, -2.0, ..., -19.0] */
 56:     PetscCall(VecGetOwnershipRange(x, &low, &high));
 57:     for (i = low; i < high; i++) {
 58:       PetscScalar val = -i;
 59:       PetscCall(VecSetValue(x, i, val, INSERT_VALUES));
 60:     }
 61:     PetscCall(VecAssemblyBegin(x));
 62:     PetscCall(VecAssemblyEnd(x));

 64:     /* Transfer x to a vector y only defined on subcomm0 and vice versa */
 65:     if (mycolor == 0) { /* subcomm0 contains ranks 0, 3, 6, ... in PETSC_COMM_WORLD */
 66:       Vec          y;
 67:       PetscScalar *yvalue;
 68:       PetscCall(VecCreate(subcomm, &y));
 69:       PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
 70:       if (iscuda) PetscCall(VecSetType(y, VECCUDA));
 71:       else PetscCall(VecSetType(y, VECSTANDARD));
 72:       PetscCall(VecSetUp(y));
 73:       PetscCall(PetscObjectSetName((PetscObject)y, "y_subcomm_0")); /* Give a name to view y clearly */
 74:       PetscCall(VecGetLocalSize(y, &n));
 75:       if (iscuda) {
 76: #if defined(PETSC_HAVE_CUDA)
 77:         PetscCall(VecCUDAGetArray(y, &yvalue));
 78: #endif
 79:       } else {
 80:         PetscCall(VecGetArray(y, &yvalue));
 81:       }
 82:       /* Create yg on PETSC_COMM_WORLD and alias yg with y. They share the memory pointed to by yvalue.
 83:         Note this is a collective call. All processes have to call it and supply a consistent N.
 84:       */
 85:       if (iscuda) {
 86: #if defined(PETSC_HAVE_CUDA)
 87:         PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, n, N, yvalue, &yg));
 88: #endif
 89:       } else {
 90:         PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, N, yvalue, &yg));
 91:       }

 93:       /* Create an identity map that makes yg[i] = x[i], i=0..N-1 */
 94:       PetscCall(VecGetOwnershipRange(yg, &low, &high)); /* low, high are global indices */
 95:       PetscCall(ISCreateStride(PETSC_COMM_SELF, high - low, low, 1, &ix));
 96:       PetscCall(ISDuplicate(ix, &iy));
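           /* For instance, with 7 world ranks (a sketch assuming that rank count), y has global size 20 and is split
              7/7/6 over subcomm0's three ranks, so they own yg[0..6], yg[7..13] and yg[14..19] respectively, and
              ix = iy is exactly that contiguous range on each of them */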

 98:       /* Union of ix's on subcomm0 covers the full range of [0,N) */
 99:       PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
100:       PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
101:       PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));

103:       /* Once yg has received the data from x, we return yvalue to y so that we can use y in other operations.
104:         VecGetArray() must be paired with VecRestoreArray() (and likewise for the CUDA variants).
105:       */
106:       if (iscuda) {
107: #if defined(PETSC_HAVE_CUDA)
108:         PetscCall(VecCUDARestoreArray(y, &yvalue));
109: #endif
110:       } else {
111:         PetscCall(VecRestoreArray(y, &yvalue));
112:       }

114:       /* Libraries on subcomm0 can safely use y now, for example, view and scale it */
115:       PetscCall(VecView(y, PETSC_VIEWER_STDOUT_(subcomm)));
116:       PetscCall(VecScale(y, 2.0));

118:       /* Send the new y back to x */
119:       PetscCall(VecGetArray(y, &yvalue)); /* If VecScale is done on GPU, PETSc will prepare a valid yvalue for access */
120:       /* Supply new yvalue to yg without memory copying */
121:       PetscCall(VecPlaceArray(yg, yvalue));
122:       PetscCall(VecScatterBegin(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
123:       PetscCall(VecScatterEnd(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
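           /* VecResetArray() undoes the VecPlaceArray() above, so yg points back to the array it was created with */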
124:       PetscCall(VecResetArray(yg));
125:       PetscCall(VecRestoreArray(y, &yvalue));
126:       PetscCall(VecDestroy(&y));
127:     } else {
128:       /* Ranks outside of subcomm0 do not supply values to yg */
129:       if (iscuda) {
130: #if defined(PETSC_HAVE_CUDA)
131:         PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, 0 /*n*/, N, NULL, &yg));
132: #endif
133:       } else {
134:         PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 0 /*n*/, N, NULL, &yg));
135:       }

137:       /* Ranks in subcomm0 already specified the full range of the identity map. The remaining
138:         ranks just need to create empty ISes to cheat VecScatterCreate.
139:       */
140:       PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &ix));
141:       PetscCall(ISDuplicate(ix, &iy));

143:       PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
144:       PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
145:       PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));

147:       /* Send the new y back to x. Ranks outside of subcomm0 actually have nothing to send.
148:         But they have to call VecScatterBegin/End since these routines are collective.
149:       */
150:       PetscCall(VecScatterBegin(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
151:       PetscCall(VecScatterEnd(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
152:     }
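         /* After the round trip (forward scatter, VecScale by 2 on subcomm0, reverse scatter) every entry of x has been
            doubled, so x should now hold [0, -2, -4, ..., -38] */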

154:     PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
155:     PetscCall(ISDestroy(&ix));
156:     PetscCall(ISDestroy(&iy));
157:     PetscCall(VecDestroy(&x));
158:     PetscCall(VecDestroy(&yg));
159:     PetscCall(VecScatterDestroy(&vscat));
160:   } /* world2sub */

162:   /*===========================================================================
163:    *  Transfer a vector x defined on subcomm0 to a vector y defined on
164:    *  subcomm1. The two subcomms are not overlapping and their union is
165:    *  not necessarily equal to PETSC_COMM_WORLD.
166:    *===========================================================================*/
167:   if (sub2sub) {
168:     if (mycolor == 0) {
169:       /* Intentionally declare N as a local variable so that processes in subcomm1 do not know its value */
170:       PetscInt           n, N = 22;
171:       Vec                x, xg, yg;
172:       IS                 ix, iy;
173:       VecScatter         vscat;
174:       const PetscScalar *xvalue;
175:       MPI_Comm           intercomm, parentcomm;
176:       PetscMPIInt        lrank;

178:       PetscCallMPI(MPI_Comm_rank(subcomm, &lrank));
179:       /* x is on subcomm */
180:       PetscCall(VecCreate(subcomm, &x));
181:       PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
182:       if (iscuda) PetscCall(VecSetType(x, VECCUDA));
183:       else PetscCall(VecSetType(x, VECSTANDARD));
184:       PetscCall(VecSetUp(x));
185:       PetscCall(VecGetOwnershipRange(x, &low, &high));

187:       /* initialize x = [0.0, 1.0, 2.0, ..., 21.0] */
188:       for (i = low; i < high; i++) {
189:         PetscScalar val = i;
190:         PetscCall(VecSetValue(x, i, val, INSERT_VALUES));
191:       }
192:       PetscCall(VecAssemblyBegin(x));
193:       PetscCall(VecAssemblyEnd(x));

195:       PetscCallMPI(MPI_Intercomm_create(subcomm, 0, PETSC_COMM_WORLD /*peer_comm*/, 1, 100 /*tag*/, &intercomm));
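           /* intercomm links subcomm0 (the local group) with subcomm1 (the remote group): world rank 1 is rank 0 of
              subcomm1 and serves as the remote leader here, while the matching call on subcomm1 below names world
              rank 0 as its remote leader and uses the same tag 100 */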

197:       /* Tell rank 0 of subcomm1 the global size of x */
198:       if (!lrank) PetscCallMPI(MPI_Send(&N, 1, MPIU_INT, 0 /*receiver's rank in remote comm, i.e., subcomm1*/, 200 /*tag*/, intercomm));

200:       /* Create an intracomm PETSc can work on. Ranks in subcomm0 are ordered before ranks in subcomm1 in parentcomm.
201:         But this order actually does not matter, since what we care about is the vector y, which is defined on subcomm1.
202:       */
203:       PetscCallMPI(MPI_Intercomm_merge(intercomm, 0 /*low*/, &parentcomm));

205:       /* Create a vector xg on parentcomm, which shares memory with x */
206:       PetscCall(VecGetLocalSize(x, &n));
207:       if (iscuda) {
208: #if defined(PETSC_HAVE_CUDA)
209:         PetscCall(VecCUDAGetArrayRead(x, &xvalue));
210:         PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1, n, N, xvalue, &xg));
211: #endif
212:       } else {
213:         PetscCall(VecGetArrayRead(x, &xvalue));
214:         PetscCall(VecCreateMPIWithArray(parentcomm, 1, n, N, xvalue, &xg));
215:       }

217:       /* Ranks in subcomm0 have nothing on yg, so they simply have n=0, array=NULL */
218:       if (iscuda) {
219: #if defined(PETSC_HAVE_CUDA)
220:         PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1, 0 /*n*/, N, NULL /*array*/, &yg));
221: #endif
222:       } else {
223:         PetscCall(VecCreateMPIWithArray(parentcomm, 1, 0 /*n*/, N, NULL /*array*/, &yg));
224:       }

226:       /* Create the vecscatter, which does an identity map by setting yg[i] = xg[i], i=0..N-1. */
227:       PetscCall(VecGetOwnershipRange(xg, &low, &high)); /* low, high are global indices of xg */
228:       PetscCall(ISCreateStride(PETSC_COMM_SELF, high - low, low, 1, &ix));
229:       PetscCall(ISDuplicate(ix, &iy));
230:       PetscCall(VecScatterCreate(xg, ix, yg, iy, &vscat));

232:       /* Scatter values from xg to yg */
233:       PetscCall(VecScatterBegin(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
234:       PetscCall(VecScatterEnd(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));

236:       /* After the VecScatter is done, xg is idle so we can safely return xvalue to x */
237:       if (iscuda) {
238: #if defined(PETSC_HAVE_CUDA)
239:         PetscCall(VecCUDARestoreArrayRead(x, &xvalue));
240: #endif
241:       } else {
242:         PetscCall(VecRestoreArrayRead(x, &xvalue));
243:       }
244:       PetscCall(VecDestroy(&x));
245:       PetscCall(ISDestroy(&ix));
246:       PetscCall(ISDestroy(&iy));
247:       PetscCall(VecDestroy(&xg));
248:       PetscCall(VecDestroy(&yg));
249:       PetscCall(VecScatterDestroy(&vscat));
250:       PetscCallMPI(MPI_Comm_free(&intercomm));
251:       PetscCallMPI(MPI_Comm_free(&parentcomm));
252:     } else if (mycolor == 1) { /* subcomm 1, containing ranks 1, 4, 7, ... in PETSC_COMM_WORLD */
253:       PetscInt     n, N;
254:       Vec          y, xg, yg;
255:       IS           ix, iy;
256:       VecScatter   vscat;
257:       PetscScalar *yvalue;
258:       MPI_Comm     intercomm, parentcomm;
259:       PetscMPIInt  lrank;

261:       PetscCallMPI(MPI_Comm_rank(subcomm, &lrank));
262:       PetscCallMPI(MPI_Intercomm_create(subcomm, 0, PETSC_COMM_WORLD /*peer_comm*/, 0 /*remote_leader*/, 100 /*tag*/, &intercomm));

264:       /* The two rank-0 processes talk to each other */
265:       if (!lrank) PetscCallMPI(MPI_Recv(&N, 1, MPIU_INT, 0 /*sender's rank in remote comm, i.e. subcomm0*/, 200 /*tag*/, intercomm, MPI_STATUS_IGNORE));
266:       /* Rank 0 of subcomm1 bcasts N to its members */
267:       PetscCallMPI(MPI_Bcast(&N, 1, MPIU_INT, 0 /*local root*/, subcomm));

269:       /* Create an intracomm PETSc can work on */
270:       PetscCallMPI(MPI_Intercomm_merge(intercomm, 1 /*high*/, &parentcomm));
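           /* high=1 here, together with high=0 on the subcomm0 side, places subcomm0's ranks before subcomm1's ranks in parentcomm */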

272:       /* Ranks in subcomm1 have nothing on xg, so they simply have n=0, array=NULL. */
273:       if (iscuda) {
274: #if defined(PETSC_HAVE_CUDA)
275:         PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1 /*bs*/, 0 /*n*/, N, NULL /*array*/, &xg));
276: #endif
277:       } else {
278:         PetscCall(VecCreateMPIWithArray(parentcomm, 1 /*bs*/, 0 /*n*/, N, NULL /*array*/, &xg));
279:       }

281:       PetscCall(VecCreate(subcomm, &y));
282:       PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
283:       if (iscuda) PetscCall(VecSetType(y, VECCUDA));
284:       else PetscCall(VecSetType(y, VECSTANDARD));
285:       PetscCall(VecSetUp(y));

287:       PetscCall(PetscObjectSetName((PetscObject)y, "y_subcomm_1")); /* Give a name to view y clearly */
288:       PetscCall(VecGetLocalSize(y, &n));
289:       if (iscuda) {
290: #if defined(PETSC_HAVE_CUDA)
291:         PetscCall(VecCUDAGetArray(y, &yvalue));
292: #endif
293:       } else {
294:         PetscCall(VecGetArray(y, &yvalue));
295:       }
296:       /* Create a vector yg on parentcomm, which shares memory with y. xg and yg must be
297:         created in the same order in subcomm0/1. For example, we cannot reverse the order of
298:         creating xg and yg in subcomm1.
299:       */
300:       if (iscuda) {
301: #if defined(PETSC_HAVE_CUDA)
302:         PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1 /*bs*/, n, N, yvalue, &yg));
303: #endif
304:       } else {
305:         PetscCall(VecCreateMPIWithArray(parentcomm, 1 /*bs*/, n, N, yvalue, &yg));
306:       }

308:       /* Ranks in subcomm0 already specified the full range of the identity map.
309:         Ranks in subcomm1 just need to create empty ISes to cheat VecScatterCreate.
310:       */
311:       PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &ix));
312:       PetscCall(ISDuplicate(ix, &iy));
313:       PetscCall(VecScatterCreate(xg, ix, yg, iy, &vscat));

315:       /* Scatter values from xg to yg */
316:       PetscCall(VecScatterBegin(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
317:       PetscCall(VecScatterEnd(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));

319:       /* After the VecScatter is done, values in yg are available. y is what we are interested in, so we return yvalue to y */
320:       if (iscuda) {
321: #if defined(PETSC_HAVE_CUDA)
322:         PetscCall(VecCUDARestoreArray(y, &yvalue));
323: #endif
324:       } else {
325:         PetscCall(VecRestoreArray(y, &yvalue));
326:       }

328:       /* Libraries on subcomm1 can safely use y now, for example, view it */
329:       PetscCall(VecView(y, PETSC_VIEWER_STDOUT_(subcomm)));

331:       PetscCall(VecDestroy(&y));
332:       PetscCall(ISDestroy(&ix));
333:       PetscCall(ISDestroy(&iy));
334:       PetscCall(VecDestroy(&xg));
335:       PetscCall(VecDestroy(&yg));
336:       PetscCall(VecScatterDestroy(&vscat));
337:       PetscCallMPI(MPI_Comm_free(&intercomm));
338:       PetscCallMPI(MPI_Comm_free(&parentcomm));
339:     } else if (mycolor == 2) { /* subcomm2 */
340:       /* Processes in subcomm2 do not participate in the VecScatter. They can freely do unrelated things on subcomm2 */
341:     }
342:   } /* sub2sub */

344:   /*===========================================================================
345:    *  Copy a vector x defined on PETSC_COMM_WORLD to vectors y defined on
346:    *  every subcommunicator of PETSC_COMM_WORLD. We could use multiple transfers
347:    *  as we did in case 1, but that is not efficient. Instead, we use one vecscatter
348:    *  to achieve that.
349:    *===========================================================================*/
350:   if (world2subs) {
351:     Vec          y;
352:     PetscInt     n, N = 15, xstart, ystart, low, high;
353:     PetscScalar *yvalue;

355:     /* Initialize x to [0, 1, 2, 3, ..., N-1] */
356:     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
357:     PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
358:     if (iscuda) PetscCall(VecSetType(x, VECCUDA));
359:     else PetscCall(VecSetType(x, VECSTANDARD));
360:     PetscCall(VecSetUp(x));
361:     PetscCall(VecGetOwnershipRange(x, &low, &high));
362:     for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
363:     PetscCall(VecAssemblyBegin(x));
364:     PetscCall(VecAssemblyEnd(x));

366:     /* Every subcomm has a y of the same length as x */
367:     PetscCall(VecCreate(subcomm, &y));
368:     PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
369:     if (iscuda) PetscCall(VecSetType(y, VECCUDA));
370:     else PetscCall(VecSetType(y, VECSTANDARD));
371:     PetscCall(VecSetUp(y));
372:     PetscCall(VecGetLocalSize(y, &n));

374:     /* Create a global vector yg on PETSC_COMM_WORLD using y's memory. yg's global size = N*(number of subcommunicators).
375:        Each rank in the subcomms contributes a piece to construct the global yg. Keep in mind that pieces from a subcomm are not
376:        necessarily consecutive in yg. That depends on how PETSC_COMM_WORLD is split. In our case, subcomm0 is made of ranks
377:        0, 3, 6, etc. from PETSC_COMM_WORLD. So subcomm0's pieces are interleaved with pieces from other subcomms in yg.
378:     */
379:     if (iscuda) {
380: #if defined(PETSC_HAVE_CUDA)
381:       PetscCall(VecCUDAGetArray(y, &yvalue));
382:       PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, yvalue, &yg));
383: #endif
384:     } else {
385:       PetscCall(VecGetArray(y, &yvalue));
386:       PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, yvalue, &yg));
387:     }
388:     PetscCall(PetscObjectSetName((PetscObject)yg, "yg_on_subcomms")); /* Give a name to view yg clearly */
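         /* A sketch of the layout, assuming 7 ranks in PETSC_COMM_WORLD: each of the three subcomms holds a y of global
            size N=15, so yg has global size 45. With the default PETSC_DECIDE splitting, the local pieces listed in
            world-rank order have sizes 5, 8, 8, 5, 7, 7, 5, i.e. subcomm0's pieces (world ranks 0, 3, 6) are
            interleaved with those of subcomm1 (ranks 1, 4) and subcomm2 (ranks 2, 5) */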

390:     /* The following two lines are key. From xstart, we know where to pull entries from x. Note that we get xstart from y,
391:        since the first entry of y on this rank comes from x[xstart]. From ystart, we know where to put entries in yg.
392:      */
393:     PetscCall(VecGetOwnershipRange(y, &xstart, NULL));
394:     PetscCall(VecGetOwnershipRange(yg, &ystart, NULL));

396:     PetscCall(ISCreateStride(PETSC_COMM_SELF, n, xstart, 1, &ix));
397:     PetscCall(ISCreateStride(PETSC_COMM_SELF, n, ystart, 1, &iy));
398:     PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
399:     PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
400:     PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));

402:     /* View yg on PETSC_COMM_WORLD before destroying it. We shall see the interleaving effect in the output. */
403:     PetscCall(VecView(yg, PETSC_VIEWER_STDOUT_WORLD));
404:     PetscCall(VecDestroy(&yg));

406:     /* Restore yvalue so that processes in the subcomm can use y from now on. */
407:     if (iscuda) {
408: #if defined(PETSC_HAVE_CUDA)
409:       PetscCall(VecCUDARestoreArray(y, &yvalue));
410: #endif
411:     } else {
412:       PetscCall(VecRestoreArray(y, &yvalue));
413:     }
414:     PetscCall(VecScale(y, 3.0));

416:     PetscCall(ISDestroy(&ix)); /* One can also destroy ix, iy immediately after VecScatterCreate() */
417:     PetscCall(ISDestroy(&iy));
418:     PetscCall(VecDestroy(&x));
419:     PetscCall(VecDestroy(&y));
420:     PetscCall(VecScatterDestroy(&vscat));
421:   } /* world2subs */

423:   PetscCallMPI(MPI_Comm_free(&subcomm));
424:   PetscCall(PetscFinalize());
425:   return 0;
426: }

428: /*TEST

430:    build:
431:      requires: !defined(PETSC_HAVE_MPIUNI)

433:    testset:
434:      nsize: 7

436:      test:
437:        suffix: 1
438:        args: -world2sub

440:      test:
441:        suffix: 2
442:        args: -sub2sub
443:        # deadlocks with NECMPI and INTELMPI (20210400300)
444:        requires: !defined(PETSC_HAVE_NECMPI) !defined(PETSC_HAVE_I_MPI)

446:      test:
447:        suffix: 3
448:        args: -world2subs

450:      test:
451:        suffix: 4
452:        args: -world2sub -vectype cuda
453:        requires: cuda

455:      test:
456:        suffix: 5
457:        args: -sub2sub -vectype cuda
458:        requires: cuda

460:      test:
461:        suffix: 6
462:        args: -world2subs -vectype cuda
463:        requires: cuda

465:    testset:
466:      nsize: 7
467:      args: -world2sub -sf_type neighbor
468:      output_file: output/ex9_1.out
469:      # segfaults with NECMPI
470:      requires: defined(PETSC_HAVE_MPI_NEIGHBORHOOD_COLLECTIVES) !defined(PETSC_HAVE_NECMPI)

472:      test:
473:        suffix: 71

475:      test:
476:        suffix: 72
477:        requires: defined(PETSC_HAVE_MPI_PERSISTENT_NEIGHBORHOOD_COLLECTIVES)
478:        args: -sf_neighbor_persistent

480: TEST*/