/* Source: sf.c — PETSc star forest (PetscSF) core interface, petsc-3.8.3 (2017-12-09).
   Note: this listing was extracted from the rendered documentation; error-checking
   wrappers (ierr = ...; CHKERRQ(ierr);) were stripped by the renderer. */
  1:  #include <petsc/private/sfimpl.h>
  2:  #include <petscctable.h>

/* Logging support */
PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;

#if defined(PETSC_USE_DEBUG)
/* Debug builds: error out if the graph of 'sf' (argument number 'arg') has not
   been set with PetscSFSetGraph() before the calling function uses it.
   Wrapped in do/while(0) so the macro is a single statement in any context. */
#  define PetscSFCheckGraphSet(sf,arg) do {                          \
    if (PetscUnlikely(!(sf)->graphset))                              \
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
  } while (0)
#else
/* Optimized builds: the check compiles away entirely */
#  define PetscSFCheckGraphSet(sf,arg) do {} while (0)
#endif

/* String names for PetscSFDuplicateOption (options processing/printing); list is NULL-terminated */
const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};

 18: /*@C
 19:    PetscSFCreate - create a star forest communication context

 21:    Not Collective

 23:    Input Arguments:
 24: .  comm - communicator on which the star forest will operate

 26:    Output Arguments:
 27: .  sf - new star forest context

 29:    Level: intermediate

 31: .seealso: PetscSFSetGraph(), PetscSFDestroy()
 32: @*/
 33: PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
 34: {
 36:   PetscSF        b;

 40:   PetscSFInitializePackage();

 42:   PetscHeaderCreate(b,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);

 44:   b->nroots    = -1;
 45:   b->nleaves   = -1;
 46:   b->nranks    = -1;
 47:   b->rankorder = PETSC_TRUE;
 48:   b->ingroup   = MPI_GROUP_NULL;
 49:   b->outgroup  = MPI_GROUP_NULL;
 50:   b->graphset  = PETSC_FALSE;

 52:   *sf = b;
 53:   return(0);
 54: }

/*@C
   PetscSFReset - Reset a star forest so that different sizes or neighbors can be used

   Collective

   Input Arguments:
.  sf - star forest

   Level: advanced

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFReset(PetscSF sf)
{

  /* Let the type-specific implementation release its private data first */
  if (sf->ops->Reset) {(*sf->ops->Reset)(sf);}
  /* mine/remote may alias mine_alloc/remote_alloc (COPY/OWN modes) or borrow a
     caller array (USE_POINTER); only the *_alloc pointers are owned and freed */
  sf->mine   = NULL;
  PetscFree(sf->mine_alloc);
  sf->remote = NULL;
  PetscFree(sf->remote_alloc);
  /* Rank tables built by PetscSFSetUpRanks() */
  PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);
  sf->nranks = -1;
  PetscFree(sf->degree);
  /* Cached MPI groups from PetscSFGetGroups() */
  if (sf->ingroup  != MPI_GROUP_NULL) {MPI_Group_free(&sf->ingroup);}
  if (sf->outgroup != MPI_GROUP_NULL) {MPI_Group_free(&sf->outgroup);}
  /* Cached multi-SF from PetscSFGetMultiSF() */
  PetscSFDestroy(&sf->multi);
  sf->graphset = PETSC_FALSE;
  sf->setupcalled = PETSC_FALSE;
  return(0);
}

/*@C
   PetscSFSetType - set the PetscSF communication implementation

   Collective on PetscSF

   Input Parameters:
+  sf - the PetscSF context
-  type - a known method

   Options Database Key:
.  -sf_type <type> - Sets the method; use -help for a list
   of available methods (for instance, window, pt2pt, neighbor)

   Notes:
   See "include/petscsf.h" for available methods (for instance)
+    PETSCSFWINDOW - MPI-2/3 one-sided
-    PETSCSFBASIC - basic implementation using MPI-1 two-sided

  Level: intermediate

.keywords: PetscSF, set, type

.seealso: PetscSFType, PetscSFCreate()
@*/
PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
{
  PetscErrorCode ierr,(*r)(PetscSF);
  PetscBool      match;


  /* No-op when the object already has the requested type */
  PetscObjectTypeCompare((PetscObject)sf,type,&match);
  if (match) return(0);

  /* Look up the registered constructor for this type name */
  PetscFunctionListFind(PetscSFList,type,&r);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
  /* Destroy the previous private PetscSF context */
  if (sf->ops->Destroy) {
    (*(sf)->ops->Destroy)(sf);
  }
  /* Zero the ops table so stale callbacks from the old type cannot survive,
     then let the new type's constructor fill it in */
  PetscMemzero(sf->ops,sizeof(*sf->ops));
  PetscObjectChangeTypeName((PetscObject)sf,type);
  (*r)(sf);
  return(0);
}

/*@
   PetscSFDestroy - destroy star forest

   Collective

   Input Arguments:
.  sf - address of star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFReset()
@*/
PetscErrorCode PetscSFDestroy(PetscSF *sf)
{

  if (!*sf) return(0);
  /* Standard PETSc reference counting: only the last reference destroys */
  if (--((PetscObject)(*sf))->refct > 0) {*sf = 0; return(0);}
  /* Release graph/rank data, then the implementation's private context,
     then the object header itself (which also sets *sf to NULL) */
  PetscSFReset(*sf);
  if ((*sf)->ops->Destroy) {(*(*sf)->ops->Destroy)(*sf);}
  PetscHeaderDestroy(sf);
  return(0);
}

164: /*@
165:    PetscSFSetUp - set up communication structures

167:    Collective

169:    Input Arguments:
170: .  sf - star forest communication object

172:    Level: beginner

174: .seealso: PetscSFSetFromOptions(), PetscSFSetType()
175: @*/
176: PetscErrorCode PetscSFSetUp(PetscSF sf)
177: {

181:   if (sf->setupcalled) return(0);
182:   if (!((PetscObject)sf)->type_name) {PetscSFSetType(sf,PETSCSFBASIC);}
183:   if (sf->ops->SetUp) {(*sf->ops->SetUp)(sf);}
184:   sf->setupcalled = PETSC_TRUE;
185:   return(0);
186: }

/*@C
   PetscSFSetFromOptions - set PetscSF options using the options database

   Logically Collective

   Input Arguments:
.  sf - star forest

   Options Database Keys:
+  -sf_type - implementation type, see PetscSFSetType()
-  -sf_rank_order - sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise

   Level: intermediate

.keywords: KSP, set, from, options, database

.seealso: PetscSFWindowSetSyncType()
@*/
PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
{
  PetscSFType    deft;
  char           type[256];
  PetscBool      flg;

  PetscObjectOptionsBegin((PetscObject)sf);
  /* Default to the current type if one is already set, otherwise basic */
  deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
  PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,256,&flg);
  PetscSFSetType(sf,flg ? type : deft);
  PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);
  /* Let the implementation process its own options (PetscOptionsObject is
     declared by the PetscObjectOptionsBegin macro) */
  if (sf->ops->SetFromOptions) {(*sf->ops->SetFromOptions)(PetscOptionsObject,sf);}
  PetscOptionsEnd();
  return(0);
}

/*@C
   PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order

   Logically Collective

   Input Arguments:
+  sf - star forest
-  flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)

   Level: advanced

.seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
{

  /* sf->multi is built lazily (honoring rankorder) by PetscSFGetMultiSF();
     once it exists, changing the ordering flag would be ignored, so error out */
  if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
  sf->rankorder = flg;
  return(0);
}

249: /*@C
250:    PetscSFSetGraph - Set a parallel star forest

252:    Collective

254:    Input Arguments:
255: +  sf - star forest
256: .  nroots - number of root vertices on the current process (these are possible targets for other process to attach leaves)
257: .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
258: .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
259: .  localmode - copy mode for ilocal
260: .  iremote - remote locations of root vertices for each leaf on the current process
261: -  remotemode - copy mode for iremote

263:    Level: intermediate

265: .seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
266: @*/
267: PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
268: {
269:   PetscErrorCode     ierr;

273:   PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);
276:   if (nroots < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"roots %D, cannot be negative",nroots);
277:   if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
278:   PetscSFReset(sf);
279:   sf->nroots  = nroots;
280:   sf->nleaves = nleaves;
281:   if (ilocal) {
282:     PetscInt i;
283:     switch (localmode) {
284:     case PETSC_COPY_VALUES:
285:       PetscMalloc1(nleaves,&sf->mine_alloc);
286:       sf->mine    = sf->mine_alloc;
287:       PetscMemcpy(sf->mine,ilocal,nleaves*sizeof(*sf->mine));
288:       sf->minleaf = PETSC_MAX_INT;
289:       sf->maxleaf = PETSC_MIN_INT;
290:       for (i=0; i<nleaves; i++) {
291:         sf->minleaf = PetscMin(sf->minleaf,ilocal[i]);
292:         sf->maxleaf = PetscMax(sf->maxleaf,ilocal[i]);
293:       }
294:       break;
295:     case PETSC_OWN_POINTER:
296:       sf->mine_alloc = (PetscInt*)ilocal;
297:       sf->mine       = sf->mine_alloc;
298:       break;
299:     case PETSC_USE_POINTER:
300:       sf->mine = (PetscInt*)ilocal;
301:       break;
302:     default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
303:     }
304:   }
305:   if (!ilocal || nleaves > 0) {
306:     sf->minleaf = 0;
307:     sf->maxleaf = nleaves - 1;
308:   }
309:   switch (remotemode) {
310:   case PETSC_COPY_VALUES:
311:     PetscMalloc1(nleaves,&sf->remote_alloc);
312:     sf->remote = sf->remote_alloc;
313:     PetscMemcpy(sf->remote,iremote,nleaves*sizeof(*sf->remote));
314:     break;
315:   case PETSC_OWN_POINTER:
316:     sf->remote_alloc = (PetscSFNode*)iremote;
317:     sf->remote       = sf->remote_alloc;
318:     break;
319:   case PETSC_USE_POINTER:
320:     sf->remote = (PetscSFNode*)iremote;
321:     break;
322:   default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
323:   }

325:   sf->graphset = PETSC_TRUE;
326:   PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);
327:   return(0);
328: }

/*@C
   PetscSFCreateInverseSF - given a PetscSF in which all vertices have degree 1, creates the inverse map

   Collective

   Input Arguments:
.  sf - star forest to invert

   Output Arguments:
.  isf - inverse of sf

   Level: advanced

   Notes:
   All roots must have degree 1.

   The local space may be a permutation, but cannot be sparse.

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
{
  PetscMPIInt    rank;
  PetscInt       i,nroots,nleaves,maxlocal,count,*newilocal;
  const PetscInt *ilocal;
  PetscSFNode    *roots,*leaves;

  MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
  PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
  /* One past the largest local leaf index: the leaf buffer must span this range */
  for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,(ilocal ? ilocal[i] : i)+1);
  PetscMalloc2(nroots,&roots,maxlocal,&leaves);
  /* Each leaf advertises its own (rank, local index) to the root it references */
  for (i=0; i<maxlocal; i++) {
    leaves[i].rank  = rank;
    leaves[i].index = i;
  }
  /* Sentinel (-1,-1) marks roots that receive nothing (degree 0) */
  for (i=0; i <nroots; i++) {
    roots[i].rank  = -1;
    roots[i].index = -1;
  }
  /* Reduce with REPLACE: each root records the (unique, since degree<=1) leaf referencing it */
  PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);
  PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);

  /* Check whether our leaves are sparse */
  for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
  if (count == nroots) newilocal = NULL;  /* dense: contiguous leaf storage in the inverse */
  else {                        /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
    PetscMalloc1(count,&newilocal);
    for (i=0,count=0; i<nroots; i++) {
      if (roots[i].rank >= 0) {
        newilocal[count]   = i;
        roots[count].rank  = roots[i].rank;
        roots[count].index = roots[i].index;
        count++;
      }
    }
  }

  /* The inverse has maxlocal roots (old leaf space) and 'count' leaves (old roots that were hit) */
  PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);
  PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);
  PetscFree2(roots,leaves);
  return(0);
}

/*@
   PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph

   Collective

   Input Arguments:
+  sf - communication object to duplicate
-  opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)

   Output Arguments:
.  newsf - new communication object

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
{

  /* Same communicator and type as the original */
  PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);
  PetscSFSetType(*newsf,((PetscObject)sf)->type_name);
  /* Implementation copies its private configuration (and ranks, if opt requests) */
  if (sf->ops->Duplicate) {(*sf->ops->Duplicate)(sf,opt,*newsf);}
  if (opt == PETSCSF_DUPLICATE_GRAPH) {
    PetscInt          nroots,nleaves;
    const PetscInt    *ilocal;
    const PetscSFNode *iremote;
    /* Deep-copy the graph so the duplicate is independent of the original's arrays */
    PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);
    PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
  }
  return(0);
}

/*@C
   PetscSFGetGraph - Get the graph specifying a parallel star forest

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nroots - number of root vertices on the current process (these are possible targets for other process to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers (NULL means contiguous storage)
-  iremote - remote locations of root vertices for each leaf on the current process

   Notes:
   Any output argument may be NULL if not needed. The returned arrays are
   owned by sf and must not be freed by the caller.

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
{

  /* We are not currently requiring that the graph is set, thus returning nroots=-1 if it has not been set */
  /* if (!sf->graphset) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Graph has not been set, must call PetscSFSetGraph()"); */
  if (nroots) *nroots = sf->nroots;
  if (nleaves) *nleaves = sf->nleaves;
  if (ilocal) *ilocal = sf->mine;
  if (iremote) *iremote = sf->remote;
  return(0);
}

/*@C
   PetscSFGetLeafRange - Get the active leaf ranges

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  minleaf - minimum active leaf on this process
-  maxleaf - maximum active leaf on this process

   Notes:
   Either output argument may be NULL. The range is computed by
   PetscSFSetGraph(); it is not meaningful before the graph is set.

   Level: developer

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
{

  if (minleaf) *minleaf = sf->minleaf;
  if (maxleaf) *maxleaf = sf->maxleaf;
  return(0);
}

/*@C
   PetscSFView - view a star forest

   Collective

   Input Arguments:
+  sf - star forest
-  viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  /* Default to stdout on the SF's communicator when no viewer is supplied */
  if (!viewer) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);}
  /* The per-rank tables printed below are built during setup */
  PetscSFSetUp(sf);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscMPIInt rank;
    PetscInt    ii,i,j;

    PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);
    PetscViewerASCIIPushTab(viewer);
    if (sf->ops->View) {(*sf->ops->View)(sf,viewer);}
    MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
    /* Synchronized mode serializes per-process output in rank order at the flushes */
    PetscViewerASCIIPushSynchronized(viewer);
    PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);
    for (i=0; i<sf->nleaves; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);
    }
    PetscViewerFlush(viewer);
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      PetscMPIInt *tmpranks,*perm;
      /* Sort a copy of the rank list; perm maps sorted position -> storage index */
      PetscMalloc2(sf->nranks,&tmpranks,sf->nranks,&perm);
      PetscMemcpy(tmpranks,sf->ranks,sf->nranks*sizeof(tmpranks[0]));
      for (i=0; i<sf->nranks; i++) perm[i] = i;
      PetscSortMPIIntWithArray(sf->nranks,tmpranks,perm);
      PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);
      for (ii=0; ii<sf->nranks; ii++) {
        i = perm[ii];
        PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);
        for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
          PetscViewerASCIISynchronizedPrintf(viewer,"[%d]    %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);
        }
      }
      PetscFree2(tmpranks,perm);
    }
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}

/*@C
   PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nranks - number of ranks referenced by local part
.  ranks - array of ranks
.  roffset - offset in rmine/rremote for each rank (length nranks+1)
.  rmine - concatenated array holding local indices referencing each remote rank
-  rremote - concatenated array holding remote indices referenced for each remote rank

   Notes:
   Any output argument may be NULL. The arrays are owned by sf; they are
   built by PetscSFSetUpRanks() during PetscSFSetUp().

   Level: developer

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
{

  if (!sf->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetUp before obtaining ranks");
  if (nranks)  *nranks  = sf->nranks;
  if (ranks)   *ranks   = sf->ranks;
  if (roffset) *roffset = sf->roffset;
  if (rmine)   *rmine   = sf->rmine;
  if (rremote) *rremote = sf->rremote;
  return(0);
}

584: static PetscBool InList(PetscMPIInt needle,PetscMPIInt n,const PetscMPIInt *list) {
585:   PetscInt i;
586:   for (i=0; i<n; i++) {
587:     if (needle == list[i]) return PETSC_TRUE;
588:   }
589:   return PETSC_FALSE;
590: }

/*@C
   PetscSFSetUpRanks - Set up data structures associated with ranks; this is for internal use by PetscSF implementations.

   Collective

   Input Arguments:
+  sf - PetscSF to set up; PetscSFSetGraph() must have been called
-  dgroup - MPI_Group of ranks to be distinguished (e.g., for self or shared memory exchange)

   Level: developer

.seealso: PetscSFGetRanks()
@*/
PetscErrorCode PetscSFSetUpRanks(PetscSF sf,MPI_Group dgroup)
{
  PetscErrorCode     ierr;
  PetscTable         table;
  PetscTablePosition pos;
  PetscMPIInt        size,groupsize,*groupranks;
  PetscInt           i,*rcount,*ranks;

  if (!sf->graphset) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"PetscSFSetGraph() has not been called yet");
  MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);
  /* Count the number of leaves referencing each remote rank, via a hash table */
  PetscTableCreate(10,size,&table);
  for (i=0; i<sf->nleaves; i++) {
    /* Log 1-based rank (PetscTable cannot store key 0) */
    PetscTableAdd(table,sf->remote[i].rank+1,1,ADD_VALUES);
  }
  PetscTableGetCount(table,&sf->nranks);
  PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,sf->nleaves,&sf->rmine,sf->nleaves,&sf->rremote);
  PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);
  PetscTableGetHeadPosition(table,&pos);
  for (i=0; i<sf->nranks; i++) {
    PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);
    ranks[i]--;             /* Convert back to 0-based */
  }
  PetscTableDestroy(&table);

  /* We expect that dgroup is reliably "small" while nranks could be large */
  {
    MPI_Group group;
    PetscMPIInt *dgroupranks;
    /* Translate the members of dgroup into rank numbers of sf's communicator */
    MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
    MPI_Group_size(dgroup,&groupsize);
    PetscMalloc1(groupsize,&dgroupranks);
    PetscMalloc1(groupsize,&groupranks);
    for (i=0; i<groupsize; i++) dgroupranks[i] = i;
    if (groupsize) {MPI_Group_translate_ranks(dgroup,groupsize,dgroupranks,group,groupranks);}
    MPI_Group_free(&group);
    PetscFree(dgroupranks);
  }

  /* Partition ranks[] into distinguished (first sf->ndranks) followed by non-distinguished,
     using a two-pointer scan-and-swap (rcount stays paired with ranks) */
  for (sf->ndranks=0,i=sf->nranks; sf->ndranks<i; ) {
    for (i--; sf->ndranks<i; i--) { /* Scan i backward looking for distinguished rank */
      if (InList(ranks[i],groupsize,groupranks)) break;
    }
    for ( ; sf->ndranks<=i; sf->ndranks++) { /* Scan sf->ndranks forward looking for non-distinguished rank */
      if (!InList(ranks[sf->ndranks],groupsize,groupranks)) break;
    }
    if (sf->ndranks < i) {                         /* Swap ranks[sf->ndranks] with ranks[i] */
      PetscInt    tmprank,tmpcount;
      tmprank = ranks[i];
      tmpcount = rcount[i];
      ranks[i] = ranks[sf->ndranks];
      rcount[i] = rcount[sf->ndranks];
      ranks[sf->ndranks] = tmprank;
      rcount[sf->ndranks] = tmpcount;
      sf->ndranks++;
    }
  }
  PetscFree(groupranks);
  /* Sort each partition by rank so the binary searches below (PetscFindMPIInt) work */
  PetscSortIntWithArray(sf->ndranks,ranks,rcount);
  PetscSortIntWithArray(sf->nranks-sf->ndranks,ranks+sf->ndranks,rcount+sf->ndranks);
  /* Build CSR-style offsets into rmine/rremote; rcount is then reused as a fill cursor */
  sf->roffset[0] = 0;
  for (i=0; i<sf->nranks; i++) {
    PetscMPIIntCast(ranks[i],sf->ranks+i);
    sf->roffset[i+1] = sf->roffset[i] + rcount[i];
    rcount[i]        = 0;
  }
  for (i=0; i<sf->nleaves; i++) {
    PetscInt irank;
    /* Search for index of iremote[i].rank in sf->ranks (distinguished part first) */
    PetscFindMPIInt(sf->remote[i].rank,sf->ndranks,sf->ranks,&irank);
    if (irank < 0) {
      PetscFindMPIInt(sf->remote[i].rank,sf->nranks-sf->ndranks,sf->ranks+sf->ndranks,&irank);
      if (irank >= 0) irank += sf->ndranks;
    }
    if (irank < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",sf->remote[i].rank);
    sf->rmine[sf->roffset[irank] + rcount[irank]]   = sf->mine ? sf->mine[i] : i;
    sf->rremote[sf->roffset[irank] + rcount[irank]] = sf->remote[i].index;
    rcount[irank]++;
  }
  PetscFree2(rcount,ranks);
  return(0);
}

/*@C
   PetscSFGetGroups - gets incoming and outgoing process groups

   Collective

   Input Argument:
.  sf - star forest

   Output Arguments:
+  incoming - group of origin processes for incoming edges (leaves that reference my roots)
-  outgoing - group of destination processes for outgoing edges (roots that I reference)

   Level: developer

.seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
@*/
PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
{
  MPI_Group      group;

  if (sf->ingroup == MPI_GROUP_NULL) {
    PetscInt       i;
    const PetscInt *indegree;
    PetscMPIInt    rank,*outranks,*inranks;
    PetscSFNode    *remote;
    PetscSF        bgcount;

    /* Compute the number of incoming ranks: build an auxiliary SF with one root
       per process and one leaf per referenced rank, so the root's in-degree is
       the number of processes whose leaves reference this process */
    PetscMalloc1(sf->nranks,&remote);
    for (i=0; i<sf->nranks; i++) {
      remote[i].rank  = sf->ranks[i];
      remote[i].index = 0;
    }
    PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);
    PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
    PetscSFComputeDegreeBegin(bgcount,&indegree);
    PetscSFComputeDegreeEnd(bgcount,&indegree);

    /* Enumerate the incoming ranks: every leaf sends its own rank to the root */
    PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);
    MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
    for (i=0; i<sf->nranks; i++) outranks[i] = rank;
    PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);
    PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);
    MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
    MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);
    MPI_Group_free(&group);
    PetscFree2(inranks,outranks);
    PetscSFDestroy(&bgcount);
  }
  /* Groups are cached on the SF and freed by PetscSFReset() */
  *incoming = sf->ingroup;

  if (sf->outgroup == MPI_GROUP_NULL) {
    /* Outgoing group is simply the set of ranks our leaves reference */
    MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
    MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);
    MPI_Group_free(&group);
  }
  *outgoing = sf->outgroup;
  return(0);
}

/*@C
   PetscSFGetMultiSF - gets the inner SF implemeting gathers and scatters

   Collective

   Input Argument:
.  sf - star forest that may contain roots with 0 or with more than 1 vertex

   Output Arguments:
.  multi - star forest with split roots, such that each root has degree exactly 1

   Level: developer

   Notes:

   In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
   directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
   edge, it is a candidate for future optimization that might involve its removal.

.seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
{

  if (sf->nroots < 0) {         /* Graph has not been set yet; why do we need this? */
    PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
    *multi = sf->multi;
    return(0);
  }
  if (!sf->multi) {
    const PetscInt *indegree;
    PetscInt       i,*inoffset,*outones,*outoffset,maxlocal;
    PetscSFNode    *remote;
    PetscSFComputeDegreeBegin(sf,&indegree);
    PetscSFComputeDegreeEnd(sf,&indegree);
    /* One past the largest local leaf index (size of the leaf-space buffers) */
    for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
    PetscMalloc3(sf->nroots+1,&inoffset,maxlocal,&outones,maxlocal,&outoffset);
    /* Prefix sum over in-degrees: inoffset[i] is the first multi-root slot of root i */
    inoffset[0] = 0;
    for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
    for (i=0; i<maxlocal; i++) outones[i] = 1;
    /* Fetch-and-add of 1: each leaf receives a distinct slot within its root's range */
    PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
    PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
    for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
#if 0
#if defined(PETSC_USE_DEBUG)                                 /* Check that the expected number of increments occurred */
    for (i=0; i<sf->nroots; i++) {
      if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
    }
#endif
#endif
    /* Re-target each leaf at its private slot in the expanded (multi) root space */
    PetscMalloc1(sf->nleaves,&remote);
    for (i=0; i<sf->nleaves; i++) {
      remote[i].rank  = sf->remote[i].rank;
      remote[i].index = outoffset[sf->mine ? sf->mine[i] : i];
    }
    PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
    PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
    if (sf->rankorder) {        /* Sort the ranks */
      PetscMPIInt rank;
      PetscInt    *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
      PetscSFNode *newremote;
      MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
      for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
      PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,maxlocal,&outranks,maxlocal,&newoutoffset,maxdegree,&tmpoffset);
      /* Each leaf reports its rank into its multi-root slot */
      for (i=0; i<maxlocal; i++) outranks[i] = rank;
      PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
      PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
      /* Sort the incoming ranks at each vertex, build the inverse map */
      for (i=0; i<sf->nroots; i++) {
        PetscInt j;
        for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
        PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);
        for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
      }
      /* Tell each leaf its rank-sorted slot and rebuild the multi graph accordingly */
      PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);
      PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);
      PetscMalloc1(sf->nleaves,&newremote);
      for (i=0; i<sf->nleaves; i++) {
        newremote[i].rank  = sf->remote[i].rank;
        newremote[i].index = newoutoffset[sf->mine ? sf->mine[i] : i];
      }
      PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);
      PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);
    }
    PetscFree3(inoffset,outones,outoffset);
  }
  *multi = sf->multi;
  return(0);
}

/*@C
   PetscSFCreateEmbeddedSF - removes edges from all but the selected roots, does not remap indices

   Collective

   Input Arguments:
+  sf - original star forest
.  nroots - number of roots to select on this process
-  selected - selected roots on this process

   Output Arguments:
.  newsf - new star forest

   Level: advanced

   Note:
   To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
   be done by calling PetscSFGetGraph().

.seealso: PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
{
  PetscInt      *rootdata, *leafdata, *ilocal;
  PetscSFNode   *iremote;
  PetscInt       leafsize = 0, nleaves = 0, n, i;

  /* leafsize = extent of the local leaf space (one past the largest leaf index) */
  if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
  else leafsize = sf->nleaves;
  /* Mark selected roots with 1, broadcast the marks to the leaves */
  PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);
  for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
  PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
  PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);

  /* Count surviving leaves, then gather their local indices and remote roots */
  for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
  PetscMalloc1(nleaves,&ilocal);
  PetscMalloc1(nleaves,&iremote);
  for (i = 0, n = 0; i < sf->nleaves; ++i) {
    const PetscInt lidx = sf->mine ? sf->mine[i] : i;

    if (leafdata[lidx]) {
      ilocal[n]        = lidx;
      iremote[n].rank  = sf->remote[i].rank;
      iremote[n].index = sf->remote[i].index;
      ++n;
    }
  }
  /* Sanity check: count from the marks must equal count from the edge scan
     (they differ if multiple leaves share a local index) */
  if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %d != %d", n, nleaves);
  PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);
  /* New SF takes ownership of ilocal and iremote; root space is unchanged */
  PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
  PetscFree2(rootdata,leafdata);
  return(0);
}

907: /*@C
908:   PetscSFCreateEmbeddedLeafSF - removes edges from all but the selected leaves, does not remap indices

910:   Collective

912:   Input Arguments:
913: + sf - original star forest
914: . nleaves - number of leaves to select on this process
915: - selected - selected leaves on this process (indices into the original leaf edge array, 0..sf->nleaves-1)

917:   Output Arguments:
918: .  newsf - new star forest

920:   Level: advanced

922: .seealso: PetscSFCreateEmbeddedSF(), PetscSFSetGraph(), PetscSFGetGraph()
923: @*/
924: PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF sf, PetscInt nleaves, const PetscInt *selected, PetscSF *newsf)
925: {
926:   PetscSFNode   *iremote;
927:   PetscInt      *ilocal;
928:   PetscInt       i;

       /* Purely local: copy the selected leaf edges (local leaf index plus remote
          root location) straight out of the original graph; the root space is unchanged. */
935:   PetscMalloc1(nleaves, &ilocal);
936:   PetscMalloc1(nleaves, &iremote);
937:   for (i = 0; i < nleaves; ++i) {
938:     const PetscInt l = selected[i];

       /* sf->mine == NULL means leaves are identified by contiguous indices 0..nleaves-1 */
940:     ilocal[i]        = sf->mine ? sf->mine[l] : l;
941:     iremote[i].rank  = sf->remote[l].rank;
942:     iremote[i].index = sf->remote[l].index;
943:   }
944:   PetscSFDuplicate(sf, PETSCSF_DUPLICATE_RANKS, newsf);
       /* ilocal/iremote ownership transfers to the new SF (PETSC_OWN_POINTER) */
945:   PetscSFSetGraph(*newsf, sf->nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);
946:   return(0);
947: }

949: /*@C
950:    PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()

952:    Collective on PetscSF

954:    Input Arguments:
955: +  sf - star forest on which to communicate
956: .  unit - data type associated with each node
957: -  rootdata - buffer to broadcast

959:    Output Arguments:
960: .  leafdata - buffer to update with values from each leaf's respective root

962:    Level: intermediate

964: .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
965: @*/
966: PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
967: {

972:   PetscSFCheckGraphSet(sf,1);
973:   PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);
       /* Lazy setup of communication structures; then dispatch to the type-specific implementation */
974:   PetscSFSetUp(sf);
975:   (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);
976:   PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);
977:   return(0);
978: }

980: /*@C
981:    PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

983:    Collective

985:    Input Arguments:
986: +  sf - star forest
987: .  unit - data type (must match the unit passed to PetscSFBcastBegin())
988: -  rootdata - buffer to broadcast

990:    Output Arguments:
991: .  leafdata - buffer to update with values from each leaf's respective root

993:    Level: intermediate

995: .seealso: PetscSFSetGraph(), PetscSFReduceEnd()
996: @*/
997: PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
998: {

1003:   PetscSFCheckGraphSet(sf,1);
1004:   PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);
        /* Dispatch to the type-specific implementation; completes the communication started in Begin */
1005:   PetscSFSetUp(sf);
1006:   (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);
1007:   PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);
1008:   return(0);
1009: }

1011: /*@C
1012:    PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()

1014:    Collective

1016:    Input Arguments:
1017: +  sf - star forest
1018: .  unit - data type
1019: .  leafdata - values to reduce
1020: -  op - reduction operation

1022:    Output Arguments:
1023: .  rootdata - result of reduction of values from all leaves of each root

1025:    Level: intermediate

1027: .seealso: PetscSFBcastBegin()
1028: @*/
1029: PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1030: {

1035:   PetscSFCheckGraphSet(sf,1);
1036:   PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);
        /* Lazy setup, then dispatch to the type-specific implementation. The explicit
           dereference (*sf->ops->...) matches the dispatch style used by every other
           wrapper in this file (BcastBegin, ReduceEnd, FetchAndOp*); the call is
           semantically identical either way. */
1037:   PetscSFSetUp(sf);
1038:   (*sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);
1039:   PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);
1040:   return(0);
1041: }

1043: /*@C
1044:    PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()

1046:    Collective

1048:    Input Arguments:
1049: +  sf - star forest
1050: .  unit - data type (must match the unit passed to PetscSFReduceBegin())
1051: .  leafdata - values to reduce
1052: -  op - reduction operation

1054:    Output Arguments:
1055: .  rootdata - result of reduction of values from all leaves of each root

1057:    Level: intermediate

1059: .seealso: PetscSFSetGraph(), PetscSFBcastEnd()
1060: @*/
1061: PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1062: {

1067:   PetscSFCheckGraphSet(sf,1);
1068:   PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);
        /* Dispatch to the type-specific implementation; completes the reduction started in Begin */
1069:   PetscSFSetUp(sf);
1070:   (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);
1071:   PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);
1072:   return(0);
1073: }

1075: /*@C
1076:    PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()

1078:    Collective

1080:    Input Arguments:
1081: .  sf - star forest

1083:    Output Arguments:
1084: .  degree - degree of each root vertex (set to NULL here; the array becomes available from the matching PetscSFComputeDegreeEnd())

1086:    Level: advanced

1088: .seealso: PetscSFGatherBegin()
1089: @*/
1090: PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
1091: {

1096:   PetscSFCheckGraphSet(sf,1);
1098:   if (!sf->degreeknown) {
1099:     PetscInt i,maxlocal;
        /* sf->degree doubles as the "reduction in flight" flag; it is allocated here
           and must not already exist when a new computation starts */
1100:     if (sf->degree) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Calls to PetscSFComputeDegreeBegin() cannot be nested.");
        /* One past the largest local leaf index, so degreetmp can be indexed by leaf id */
1101:     for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
1102:     PetscMalloc1(sf->nroots,&sf->degree);
1103:     PetscMalloc1(maxlocal,&sf->degreetmp);
1104:     for (i=0; i<sf->nroots; i++) sf->degree[i] = 0;
1105:     for (i=0; i<maxlocal; i++) sf->degreetmp[i] = 1;
        /* Summing a vector of ones from leaves into roots yields each root's degree */
1106:     PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1107:   }
        /* Result is not valid until PetscSFComputeDegreeEnd(); return NULL to catch premature use */
1108:   *degree = NULL;
1109:   return(0);
1110: }

1112: /*@C
1113:    PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()

1115:    Collective

1117:    Input Arguments:
1118: .  sf - star forest

1120:    Output Arguments:
1121: .  degree - degree of each root vertex (array of length sf->nroots, owned by the SF; do not free)

1123:    Level: developer

1125: .seealso: PetscSFComputeDegreeBegin()
1126: @*/
1127: PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
1128: {

1133:   PetscSFCheckGraphSet(sf,1);
1134:   if (!sf->degreeknown) {
        /* Finish the MPI_SUM reduction of ones started in Begin, then drop the temporary leaf buffer */
1135:     PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1136:     PetscFree(sf->degreetmp);

        /* Cache the result: subsequent Begin/End pairs return sf->degree without communication */
1138:     sf->degreeknown = PETSC_TRUE;
1139:   }
1140:   *degree = sf->degree;
1141:   return(0);
1142: }

1144: /*@C
1145:    PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()

1147:    Collective

1149:    Input Arguments:
1150: +  sf - star forest
1151: .  unit - data type
1152: .  leafdata - leaf values to use in reduction
1153: -  op - operation to use for reduction

1155:    Output Arguments:
1156: +  rootdata - root values to be updated, input state is seen by first process to perform an update
1157: -  leafupdate - state at each leaf's respective root immediately prior to my atomic update

1159:    Level: advanced

1161:    Note:
1162:    The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
1163:    might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
1164:    not guaranteed across the whole vertex. Therefore, this function is mostly only used with primitive types such as
1165:    integers.

1167: .seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
1168: @*/
1169: PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1170: {

1175:   PetscSFCheckGraphSet(sf,1);
1176:   PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);
        /* Lazy setup, then dispatch to the type-specific implementation */
1177:   PetscSFSetUp(sf);
1178:   (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);
1179:   PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);
1180:   return(0);
1181: }

1183: /*@C
1184:    PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value

1186:    Collective

1188:    Input Arguments:
1189: +  sf - star forest
1190: .  unit - data type (must match the unit passed to PetscSFFetchAndOpBegin())
1191: .  leafdata - leaf values to use in reduction
1192: -  op - operation to use for reduction

1194:    Output Arguments:
1195: +  rootdata - root values to be updated, input state is seen by first process to perform an update
1196: -  leafupdate - state at each leaf's respective root immediately prior to my atomic update

1198:    Level: advanced

1200: .seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
1201: @*/
1202: PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1203: {

1208:   PetscSFCheckGraphSet(sf,1);
1209:   PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);
        /* Dispatch to the type-specific implementation; completes the fetch-and-op started in Begin */
1210:   PetscSFSetUp(sf);
1211:   (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);
1212:   PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);
1213:   return(0);
1214: }

1216: /*@C
1217:    PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()

1219:    Collective

1221:    Input Arguments:
1222: +  sf - star forest
1223: .  unit - data type
1224: -  leafdata - leaf data to gather to roots

1226:    Output Argument:
1227: .  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

1229:    Level: intermediate

1231: .seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
1232: @*/
1233: PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1234: {
1236:   PetscSF        multi;

        /* Gather = reduce over the multi-SF with MPIU_REPLACE: every leaf has its own
           dedicated multi-root slot, so "replace" deposits each leaf value without combining.
           NOTE(review): unlike PetscSFGatherEnd() below, no PetscSFCheckGraphSet()/PetscSFSetUp()
           is visible here before PetscSFGetMultiSF() — presumably handled inside the callee; confirm. */
1240:   PetscSFGetMultiSF(sf,&multi);
1241:   PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1242:   return(0);
1243: }

1245: /*@C
1246:    PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()

1248:    Collective

1250:    Input Arguments:
1251: +  sf - star forest
1252: .  unit - data type (must match the unit passed to PetscSFGatherBegin())
1253: -  leafdata - leaf data to gather to roots

1255:    Output Argument:
1256: .  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

1258:    Level: intermediate

1260: .seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
1261: @*/
1262: PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1263: {
1265:   PetscSF        multi;

1269:   PetscSFCheckGraphSet(sf,1);
1270:   PetscSFSetUp(sf);
        /* Complete the MPIU_REPLACE reduction over the multi-SF started in GatherBegin */
1271:   PetscSFGetMultiSF(sf,&multi);
1272:   PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1273:   return(0);
1274: }

1276: /*@C
1277:    PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()

1279:    Collective

1281:    Input Arguments:
1282: +  sf - star forest
1283: .  unit - data type
1284: -  multirootdata - root buffer to send to each leaf, one unit of data per leaf

1286:    Output Argument:
1287: .  leafdata - leaf data to be update with personal data from each respective root

1289:    Level: intermediate

1291: .seealso: PetscSFComputeDegreeBegin(), PetscSFGatherBegin()
1292: @*/
1293: PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1294: {
1296:   PetscSF        multi;

1300:   PetscSFCheckGraphSet(sf,1);
1301:   PetscSFSetUp(sf);
        /* Scatter = broadcast over the multi-SF: each leaf receives the value from
           its own dedicated multi-root slot */
1302:   PetscSFGetMultiSF(sf,&multi);
1303:   PetscSFBcastBegin(multi,unit,multirootdata,leafdata);
1304:   return(0);
1305: }

1307: /*@C
1308:    PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()

1310:    Collective

1312:    Input Arguments:
1313: +  sf - star forest
1314: .  unit - data type (must match the unit passed to PetscSFScatterBegin())
1315: -  multirootdata - root buffer to send to each leaf, one unit of data per leaf

1317:    Output Argument:
1318: .  leafdata - leaf data to be update with personal data from each respective root

1320:    Level: intermediate

1322: .seealso: PetscSFComputeDegreeEnd(), PetscSFGatherEnd()
1323: @*/
1324: PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1325: {
1327:   PetscSF        multi;

1331:   PetscSFCheckGraphSet(sf,1);
1332:   PetscSFSetUp(sf);
        /* Complete the broadcast over the multi-SF started in ScatterBegin */
1333:   PetscSFGetMultiSF(sf,&multi);
1334:   PetscSFBcastEnd(multi,unit,multirootdata,leafdata);
1335:   return(0);
1336: }

1338: /*@
1339:   PetscSFCompose - Compose a new PetscSF equivalent to the composed action of two PetscSFs

1341:   Input Parameters:
1342: + sfA - The first PetscSF
1343: - sfB - The second PetscSF

1345:   Output Parameters:
1346: . sfBA - equivalent PetscSF for applying A then B

1348:   Level: developer

1350: .seealso: PetscSF, PetscSFGetGraph(), PetscSFSetGraph()
1351: @*/
1352: PetscErrorCode PetscSFCompose(PetscSF sfA, PetscSF sfB, PetscSF *sfBA)
1353: {
1354:   MPI_Comm           comm;
1355:   const PetscSFNode *remotePointsA, *remotePointsB;
1356:   PetscSFNode       *remotePointsBA;
1357:   const PetscInt    *localPointsA, *localPointsB;
1358:   PetscInt           numRootsA, numLeavesA, numRootsB, numLeavesB;
1359:   PetscErrorCode     ierr;

1364:   PetscObjectGetComm((PetscObject) sfA, &comm);
1365:   PetscSFGetGraph(sfA, &numRootsA, &numLeavesA, &localPointsA, &remotePointsA);
1366:   PetscSFGetGraph(sfB, &numRootsB, &numLeavesB, &localPointsB, &remotePointsB);
        /* Route sfA's remote root locations through sfB, so each leaf of sfB ends up
           holding the (rank,index) of the sfA root it transitively references.
           Assumes sfB's root space coincides with sfA's leaf space, and that a
           PetscSFNode (a rank/index pair of PetscInt) is communicable as MPIU_2INT
           -- NOTE(review): confirm both against the callers and PetscInt width. */
1367:   PetscMalloc1(numLeavesB, &remotePointsBA);
1368:   PetscSFBcastBegin(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
1369:   PetscSFBcastEnd(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
        /* The composite maps sfA's roots to sfB's leaves; remotePointsBA ownership
           transfers to the new SF (PETSC_OWN_POINTER) */
1370:   PetscSFCreate(comm, sfBA);
1371:   PetscSFSetGraph(*sfBA, numRootsA, numLeavesB, localPointsB, PETSC_COPY_VALUES, remotePointsBA, PETSC_OWN_POINTER);
1372:   return(0);
1373: }