/*
   Actual source code: pmap.c
   petsc-master 2019-08-22
   Report Typos and Errors
*/
/*
   This file contains routines for basic map object implementation.
*/

  6:  #include <petscis.h>
  7:  #include <petscsf.h>
  8:  #include <petsc/private/isimpl.h>

 10: /*@
 11:   PetscLayoutCreate - Allocates PetscLayout space and sets the map contents to the default.

 13:   Collective

 15:   Input Parameters:
 16: + comm - the MPI communicator
 17: - map - pointer to the map

 19:   Level: advanced

 21:   Notes:
 22:   Typical calling sequence
 23: .vb
 24:        PetscLayoutCreate(MPI_Comm,PetscLayout *);
 25:        PetscLayoutSetBlockSize(PetscLayout,1);
 26:        PetscLayoutSetSize(PetscLayout,N) // or PetscLayoutSetLocalSize(PetscLayout,n);
 27:        PetscLayoutSetUp(PetscLayout);
 28: .ve
 29:   Optionally use any of the following:

 31: + PetscLayoutGetSize(PetscLayout,PetscInt *);
 32: . PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
 33: . PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
 34: . PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
 35: - PetscLayoutDestroy(PetscLayout*);

 37:   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementions; it is often not needed in
 38:   user codes unless you really gain something in their use.

 40: .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
 41:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

 43: @*/
 44: PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
 45: {

 49:   PetscNew(map);

 51:   (*map)->comm   = comm;
 52:   (*map)->bs     = -1;
 53:   (*map)->n      = -1;
 54:   (*map)->N      = -1;
 55:   (*map)->range  = NULL;
 56:   (*map)->rstart = 0;
 57:   (*map)->rend   = 0;
 58:   return(0);
 59: }

 61: /*@
 62:   PetscLayoutDestroy - Frees a map object and frees its range if that exists.

 64:   Collective

 66:   Input Parameters:
 67: . map - the PetscLayout

 69:   Level: developer

 71:   Note:
 72:   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementions; it is
 73:   recommended they not be used in user codes unless you really gain something in their use.

 75: .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
 76:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

 78: @*/
 79: PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
 80: {

 84:   if (!*map) return(0);
 85:   if (!(*map)->refcnt--) {
 86:     PetscFree((*map)->range);
 87:     ISLocalToGlobalMappingDestroy(&(*map)->mapping);
 88:     PetscFree((*map));
 89:   }
 90:   *map = NULL;
 91:   return(0);
 92: }

 94: static PetscErrorCode PetscLayoutSetUp_SizesFromRanges_Private(PetscLayout map)
 95: {
 96:   PetscMPIInt    rank,size;

100:   MPI_Comm_size(map->comm, &size);
101:   MPI_Comm_rank(map->comm, &rank);
102:   map->rstart = map->range[rank];
103:   map->rend   = map->range[rank+1];
104:   map->n      = map->rend - map->rstart;
105:   map->N      = map->range[size];
106: #if defined(PETSC_USE_DEBUG)
107:   /* just check that n, N and bs are consistent */
108:   {
109:     PetscInt tmp;
110:     MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);
111:     if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
112:   }
113:   if (map->bs > 1) {
114:     if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
115:   }
116:   if (map->bs > 1) {
117:     if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
118:   }
119: #endif
120:   return(0);
121: }

123: /*@
124:   PetscLayoutSetUp - given a map where you have set either the global or local
125:                      size sets up the map so that it may be used.

127:   Collective

129:   Input Parameters:
130: . map - pointer to the map

132:   Level: developer

134:   Notes:
135:     Typical calling sequence
136: $ PetscLayoutCreate(MPI_Comm,PetscLayout *);
137: $ PetscLayoutSetBlockSize(PetscLayout,1);
138: $ PetscLayoutSetSize(PetscLayout,n) or PetscLayoutSetLocalSize(PetscLayout,N); or both
139: $ PetscLayoutSetUp(PetscLayout);
140: $ PetscLayoutGetSize(PetscLayout,PetscInt *);

142:   If range exists, and local size is not set, everything gets computed from the range.

144:   If the local size, global size are already set and range exists then this does nothing.

146: .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
147:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
148: @*/
149: PetscErrorCode PetscLayoutSetUp(PetscLayout map)
150: {
151:   PetscMPIInt    rank,size;
152:   PetscInt       p;

156:   if ((map->n >= 0) && (map->N >= 0) && (map->range)) return(0);
157:   if (map->range && map->n < 0) {
158:     PetscLayoutSetUp_SizesFromRanges_Private(map);
159:     return(0);
160:   }

162:   if (map->n > 0 && map->bs > 1) {
163:     if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
164:   }
165:   if (map->N > 0 && map->bs > 1) {
166:     if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
167:   }

169:   MPI_Comm_size(map->comm, &size);
170:   MPI_Comm_rank(map->comm, &rank);
171:   if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
172:   if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
173:   PetscSplitOwnership(map->comm,&map->n,&map->N);
174:   map->n = map->n*PetscAbs(map->bs);
175:   map->N = map->N*PetscAbs(map->bs);
176:   if (!map->range) {
177:     PetscMalloc1(size+1, &map->range);
178:   }
179:   MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);

181:   map->range[0] = 0;
182:   for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

184:   map->rstart = map->range[rank];
185:   map->rend   = map->range[rank+1];
186:   return(0);
187: }

189: /*@
190:   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the PetscLayout already exists it is destroyed first.

192:   Collective on PetscLayout

194:   Input Parameter:
195: . in - input PetscLayout to be duplicated

197:   Output Parameter:
198: . out - the copy

200:   Level: developer

202:   Notes:
203:     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

205: .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
206: @*/
207: PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
208: {
209:   PetscMPIInt    size;
211:   MPI_Comm       comm = in->comm;

214:   PetscLayoutDestroy(out);
215:   PetscLayoutCreate(comm,out);
216:   MPI_Comm_size(comm,&size);
217:   PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));
218:   PetscMalloc1(size+1,&(*out)->range);
219:   PetscArraycpy((*out)->range,in->range,size+1);

221:   (*out)->refcnt = 0;
222:   return(0);
223: }

225: /*@
226:   PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

228:   Collective on PetscLayout

230:   Input Parameter:
231: . in - input PetscLayout to be copied

233:   Output Parameter:
234: . out - the reference location

236:   Level: developer

238:   Notes:
239:     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

241:   If the out location already contains a PetscLayout it is destroyed

243: .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
244: @*/
245: PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
246: {

250:   in->refcnt++;
251:   PetscLayoutDestroy(out);
252:   *out = in;
253:   return(0);
254: }

256: /*@
257:   PetscLayoutSetISLocalToGlobalMapping - sets a ISLocalGlobalMapping into a PetscLayout

259:   Collective on PetscLayout

261:   Input Parameter:
262: + in - input PetscLayout
263: - ltog - the local to global mapping


266:   Level: developer

268:   Notes:
269:     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

271:   If the ltog location already contains a PetscLayout it is destroyed

273: .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
274: @*/
275: PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
276: {
278:   PetscInt       bs;

281:   ISLocalToGlobalMappingGetBlockSize(ltog,&bs);
282:   if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
283:   PetscObjectReference((PetscObject)ltog);
284:   ISLocalToGlobalMappingDestroy(&in->mapping);
285:   in->mapping = ltog;
286:   return(0);
287: }

289: /*@
290:   PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

292:   Collective on PetscLayout

294:   Input Parameters:
295: + map - pointer to the map
296: - n - the local size

298:   Level: developer

300:   Notes:
301:   Call this after the call to PetscLayoutCreate()

303: .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
304:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
305: @*/
306: PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
307: {
309:   if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
310:   map->n = n;
311:   return(0);
312: }

314: /*@C
315:      PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

317:     Not Collective

319:    Input Parameters:
320: .    map - pointer to the map

322:    Output Parameters:
323: .    n - the local size

325:    Level: developer

327:     Notes:
328:        Call this after the call to PetscLayoutSetUp()

330:     Fortran Notes:
331:       Not available from Fortran

333: .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
334:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

336: @*/
337: PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
338: {
340:   *n = map->n;
341:   return(0);
342: }

344: /*@
345:   PetscLayoutSetSize - Sets the global size for a PetscLayout object.

347:   Logically Collective on PetscLayout

349:   Input Parameters:
350: + map - pointer to the map
351: - n - the global size

353:   Level: developer

355:   Notes:
356:   Call this after the call to PetscLayoutCreate()

358: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
359:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
360: @*/
361: PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
362: {
364:   map->N = n;
365:   return(0);
366: }

368: /*@
369:   PetscLayoutGetSize - Gets the global size for a PetscLayout object.

371:   Not Collective

373:   Input Parameters:
374: . map - pointer to the map

376:   Output Parameters:
377: . n - the global size

379:   Level: developer

381:   Notes:
382:   Call this after the call to PetscLayoutSetUp()

384: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
385:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
386: @*/
387: PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
388: {
390:   *n = map->N;
391:   return(0);
392: }

394: /*@
395:   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.

397:   Logically Collective on PetscLayout

399:   Input Parameters:
400: + map - pointer to the map
401: - bs - the size

403:   Level: developer

405:   Notes:
406:   Call this after the call to PetscLayoutCreate()

408: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
409:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
410: @*/
411: PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
412: {
414:   if (bs < 0) return(0);
415:   if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
416:   if (map->mapping) {
417:     PetscInt       obs;

420:     ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);
421:     if (obs > 1) {
422:       ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);
423:     }
424:   }
425:   map->bs = bs;
426:   return(0);
427: }

429: /*@
430:   PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

432:   Not Collective

434:   Input Parameters:
435: . map - pointer to the map

437:   Output Parameters:
438: . bs - the size

440:   Level: developer

442:   Notes:
443:   Call this after the call to PetscLayoutSetUp()

445: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
446:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
447: @*/
448: PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
449: {
451:   *bs = PetscAbs(map->bs);
452:   return(0);
453: }

455: /*@
456:   PetscLayoutGetRange - gets the range of values owned by this process

458:   Not Collective

460:   Input Parameters:
461: . map - pointer to the map

463:   Output Parameters:
464: + rstart - first index owned by this process
465: - rend   - one more than the last index owned by this process

467:   Level: developer

469:   Notes:
470:   Call this after the call to PetscLayoutSetUp()

472: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
473:           PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
474: @*/
475: PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
476: {
478:   if (rstart) *rstart = map->rstart;
479:   if (rend)   *rend   = map->rend;
480:   return(0);
481: }

483: /*@C
484:      PetscLayoutGetRanges - gets the range of values owned by all processes

486:     Not Collective

488:    Input Parameters:
489: .    map - pointer to the map

491:    Output Parameters:
492: .    range - start of each processors range of indices (the final entry is one more then the
493:              last index on the last process)

495:    Level: developer

497:     Notes:
498:        Call this after the call to PetscLayoutSetUp()

500:     Fortran Notes:
501:       Not available from Fortran

503: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
504:           PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()

506: @*/
507: PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
508: {
510:   *range = map->range;
511:   return(0);
512: }

514: /*@C
515:    PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

517:    Collective

519:    Input Arguments:
520: +  sf - star forest
521: .  layout - PetscLayout defining the global space
522: .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
523: .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
524: .  localmode - copy mode for ilocal
525: -  iremote - remote locations of root vertices for each leaf on the current process

527:    Level: intermediate

529:    Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
530:    encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
531:    needed

533: .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
534: @*/
535: PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
536: {
538:   PetscInt       i,nroots;
539:   PetscSFNode    *remote;

542:   PetscLayoutGetLocalSize(layout,&nroots);
543:   PetscMalloc1(nleaves,&remote);
544:   for (i=0; i<nleaves; i++) {
545:     PetscInt owner = -1;
546:     PetscLayoutFindOwner(layout,iremote[i],&owner);
547:     remote[i].rank  = owner;
548:     remote[i].index = iremote[i] - layout->range[owner];
549:   }
550:   PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);
551:   return(0);
552: }

554: /*@
555:   PetscLayoutCompare - Compares two layouts

557:   Not Collective

559:   Input Parameters:
560: + mapa - pointer to the first map
561: - mapb - pointer to the second map

563:   Output Parameters:
564: . congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

566:   Level: beginner

568:   Notes:

570: .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
571:           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
572: @*/
573: PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
574: {
576:   PetscMPIInt    sizea,sizeb;

579:   *congruent = PETSC_FALSE;
580:   MPI_Comm_size(mapa->comm,&sizea);
581:   MPI_Comm_size(mapb->comm,&sizeb);
582:   if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
583:     PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);
584:   }
585:   return(0);
586: }

588: /* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
589: PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
590: {
591:   PetscInt      *owners = map->range;
592:   PetscInt       n      = map->n;
593:   PetscSF        sf;
594:   PetscInt      *lidxs,*work = NULL;
595:   PetscSFNode   *ridxs;
596:   PetscMPIInt    rank;
597:   PetscInt       r, p = 0, len = 0;

601:   if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
602:   /* Create SF where leaves are input idxs and roots are owned idxs */
603:   MPI_Comm_rank(map->comm,&rank);
604:   PetscMalloc1(n,&lidxs);
605:   for (r = 0; r < n; ++r) lidxs[r] = -1;
606:   PetscMalloc1(N,&ridxs);
607:   for (r = 0; r < N; ++r) {
608:     const PetscInt idx = idxs[r];
609:     if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
610:     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
611:       PetscLayoutFindOwner(map,idx,&p);
612:     }
613:     ridxs[r].rank = p;
614:     ridxs[r].index = idxs[r] - owners[p];
615:   }
616:   PetscSFCreate(map->comm,&sf);
617:   PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);
618:   PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
619:   PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
620:   if (ogidxs) { /* communicate global idxs */
621:     PetscInt cum = 0,start,*work2;

623:     PetscMalloc1(n,&work);
624:     PetscCalloc1(N,&work2);
625:     for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
626:     MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);
627:     start -= cum;
628:     cum = 0;
629:     for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
630:     PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);
631:     PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);
632:     PetscFree(work2);
633:   }
634:   PetscSFDestroy(&sf);
635:   /* Compress and put in indices */
636:   for (r = 0; r < n; ++r)
637:     if (lidxs[r] >= 0) {
638:       if (work) work[len] = work[r];
639:       lidxs[len++] = r;
640:     }
641:   if (on) *on = len;
642:   if (oidxs) *oidxs = lidxs;
643:   if (ogidxs) *ogidxs = work;
644:   return(0);
645: }