Actual source code: plexdistribute.c

petsc-master 2015-09-01
  1: #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/

  5: /*@
  6:   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  8:   Input Parameters:
  9: + dm      - The DM object
 10: - useCone - Flag to use the cone first

 12:   Level: intermediate

 14:   Notes:
 15: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 16: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 17: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 19: .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
 20: @*/
 21: PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
 22: {
 23:   DM_Plex *mesh = (DM_Plex *) dm->data;

 27:   mesh->useCone = useCone;
 28:   return(0);
 29: }

 33: /*@
 34:   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

 36:   Input Parameter:
 37: . dm      - The DM object

 39:   Output Parameter:
 40: . useCone - Flag to use the cone first

 42:   Level: intermediate

 44:   Notes:
 45: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 46: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 47: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 49: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
 50: @*/
 51: PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
 52: {
 53:   DM_Plex *mesh = (DM_Plex *) dm->data;

 58:   *useCone = mesh->useCone;
 59:   return(0);
 60: }

 64: /*@
 65:   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

 67:   Input Parameters:
 68: + dm      - The DM object
 69: - useClosure - Flag to use the closure

 71:   Level: intermediate

 73:   Notes:
 74: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 75: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 76: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 78: .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
 79: @*/
 80: PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
 81: {
 82:   DM_Plex *mesh = (DM_Plex *) dm->data;

 86:   mesh->useClosure = useClosure;
 87:   return(0);
 88: }

 92: /*@
 93:   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

 95:   Input Parameter:
 96: . dm      - The DM object

 98:   Output Parameter:
 99: . useClosure - Flag to use the closure

101:   Level: intermediate

103:   Notes:
104: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

108: .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109: @*/
110: PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111: {
112:   DM_Plex *mesh = (DM_Plex *) dm->data;

117:   *useClosure = mesh->useClosure;
118:   return(0);
119: }
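
/* Example (editor's sketch, not part of plexdistribute.c): selecting the adjacency pattern
   described in the Notes above. A finite-volume discretization would typically use the cone
   first and skip the transitive closure; the Get routines simply read the flags back. The
   function and variable names below are illustrative only. */
static PetscErrorCode ExampleSelectFVMAdjacency(DM dm)
{
  PetscBool      useCone, useClosure;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);     /* FVM: adjacency via support(p + cone(p)) */
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr); /* do not take the transitive closure */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (!useCone || useClosure) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected adjacency flags");
  PetscFunctionReturn(0);
}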

123: /*@
124:   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

126:   Input Parameters:
127: + dm      - The DM object
128: - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

130:   Level: intermediate

132: .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133: @*/
134: PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135: {
136:   DM_Plex *mesh = (DM_Plex *) dm->data;

140:   mesh->useAnchors = useAnchors;
141:   return(0);
142: }

146: /*@
147:   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

149:   Input Parameter:
150: . dm      - The DM object

152:   Output Parameter:
153: . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

155:   Level: intermediate

157: .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158: @*/
159: PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160: {
161:   DM_Plex *mesh = (DM_Plex *) dm->data;

166:   *useAnchors = mesh->useAnchors;
167:   return(0);
168: }
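
/* Example (editor's sketch, not part of plexdistribute.c): when a mesh carries point-to-point
   constraints set with DMPlexSetAnchors(), enabling this flag makes DMPlexGetAdjacency() report
   anchor points in place of constrained points. Names below are illustrative only. */
static PetscErrorCode ExampleUseAnchorAdjacency(DM dm)
{
  PetscBool      useAnchors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}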

172: static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173: {
174:   const PetscInt *cone = NULL;
175:   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176:   PetscErrorCode  ierr;

179:   DMPlexGetConeSize(dm, p, &coneSize);
180:   DMPlexGetCone(dm, p, &cone);
181:   for (c = 0; c <= coneSize; ++c) {
182:     const PetscInt  point   = !c ? p : cone[c-1];
183:     const PetscInt *support = NULL;
184:     PetscInt        supportSize, s, q;

186:     DMPlexGetSupportSize(dm, point, &supportSize);
187:     DMPlexGetSupport(dm, point, &support);
188:     for (s = 0; s < supportSize; ++s) {
189:       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190:         if (support[s] == adj[q]) break;
191:       }
192:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193:     }
194:   }
195:   *adjSize = numAdj;
196:   return(0);
197: }

201: static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202: {
203:   const PetscInt *support = NULL;
204:   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205:   PetscErrorCode  ierr;

208:   DMPlexGetSupportSize(dm, p, &supportSize);
209:   DMPlexGetSupport(dm, p, &support);
210:   for (s = 0; s <= supportSize; ++s) {
211:     const PetscInt  point = !s ? p : support[s-1];
212:     const PetscInt *cone  = NULL;
213:     PetscInt        coneSize, c, q;

215:     DMPlexGetConeSize(dm, point, &coneSize);
216:     DMPlexGetCone(dm, point, &cone);
217:     for (c = 0; c < coneSize; ++c) {
218:       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219:         if (cone[c] == adj[q]) break;
220:       }
221:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222:     }
223:   }
224:   *adjSize = numAdj;
225:   return(0);
226: }

230: static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231: {
232:   PetscInt      *star = NULL;
233:   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;

237:   DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);
238:   for (s = 0; s < starSize*2; s += 2) {
239:     const PetscInt *closure = NULL;
240:     PetscInt        closureSize, c, q;

242:     DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
243:     for (c = 0; c < closureSize*2; c += 2) {
244:       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245:         if (closure[c] == adj[q]) break;
246:       }
247:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248:     }
249:     DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
250:   }
251:   DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);
252:   *adjSize = numAdj;
253:   return(0);
254: }

258: PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259: {
260:   static PetscInt asiz = 0;
261:   PetscInt maxAnchors = 1;
262:   PetscInt aStart = -1, aEnd = -1;
263:   PetscInt maxAdjSize;
264:   PetscSection aSec = NULL;
265:   IS aIS = NULL;
266:   const PetscInt *anchors;
267:   PetscErrorCode  ierr;

270:   if (useAnchors) {
271:     DMPlexGetAnchors(dm,&aSec,&aIS);
272:     if (aSec) {
273:       PetscSectionGetMaxDof(aSec,&maxAnchors);
274:       maxAnchors = PetscMax(1,maxAnchors);
275:       PetscSectionGetChart(aSec,&aStart,&aEnd);
276:       ISGetIndices(aIS,&anchors);
277:     }
278:   }
279:   if (!*adj) {
280:     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

282:     DMPlexGetChart(dm, &pStart,&pEnd);
283:     DMPlexGetDepth(dm, &depth);
284:     DMPlexGetMaxSizes(dm, &maxC, &maxS);
285:     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286:     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287:     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288:     asiz *= maxAnchors;
289:     asiz  = PetscMin(asiz,pEnd-pStart);
290:     PetscMalloc1(asiz,adj);
291:   }
292:   if (*adjSize < 0) *adjSize = asiz;
293:   maxAdjSize = *adjSize;
294:   if (useTransitiveClosure) {
295:     DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);
296:   } else if (useCone) {
297:     DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);
298:   } else {
299:     DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);
300:   }
301:   if (useAnchors && aSec) {
302:     PetscInt origSize = *adjSize;
303:     PetscInt numAdj = origSize;
304:     PetscInt i = 0, j;
305:     PetscInt *orig = *adj;

307:     while (i < origSize) {
308:       PetscInt p = orig[i];
309:       PetscInt aDof = 0;

311:       if (p >= aStart && p < aEnd) {
312:         PetscSectionGetDof(aSec,p,&aDof);
313:       }
314:       if (aDof) {
315:         PetscInt aOff;
316:         PetscInt s, q;

318:         for (j = i + 1; j < numAdj; j++) {
319:           orig[j - 1] = orig[j];
320:         }
321:         origSize--;
322:         numAdj--;
323:         PetscSectionGetOffset(aSec,p,&aOff);
324:         for (s = 0; s < aDof; ++s) {
325:           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326:             if (anchors[aOff+s] == orig[q]) break;
327:           }
328:           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329:         }
330:       }
331:       else {
332:         i++;
333:       }
334:     }
335:     *adjSize = numAdj;
336:     ISRestoreIndices(aIS,&anchors);
337:   }
338:   return(0);
339: }

343: /*@
344:   DMPlexGetAdjacency - Return all points adjacent to the given point

346:   Input Parameters:
347: + dm - The DM object
348: . p  - The point
349: . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350: - adj - Either NULL so that the array is allocated, or an existing array with size adjSize

352:   Output Parameters:
353: + adjSize - The number of adjacent points
354: - adj - The adjacent points

356:   Level: advanced

358:   Notes: The user must PetscFree the adj array if it was not passed in.

360: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361: @*/
362: PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363: {
364:   DM_Plex       *mesh = (DM_Plex *) dm->data;

371:   DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);
372:   return(0);
373: }
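
/* Example (editor's sketch, not part of plexdistribute.c): query the adjacency of every point
   in the chart. Passing adj == NULL makes the routine allocate a work array sized for the worst
   case, so the same array can be reused for every point and must be freed with PetscFree() at
   the end, as the Notes above state. Names below are illustrative only. */
static PetscErrorCode ExamplePrintAdjacency(DM dm)
{
  PetscInt       p, pStart, pEnd, adjSize, *adj = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    adjSize = PETSC_DETERMINE; /* let the routine report the number of adjacent points */
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "point %D has %D adjacent points\n", p, adjSize);CHKERRQ(ierr);
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
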
376: /*@
377:   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

379:   Collective on DM

381:   Input Parameters:
382: + dm      - The DM
383: - sfPoint - The PetscSF which encodes point connectivity

385:   Output Parameters:
386: + processRanks - A list of process neighbors, or NULL
387: - sfProcess    - An SF encoding the two-sided process connectivity, or NULL

389:   Level: developer

391: .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392: @*/
393: PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394: {
395:   const PetscSFNode *remotePoints;
396:   PetscInt          *localPointsNew;
397:   PetscSFNode       *remotePointsNew;
398:   const PetscInt    *nranks;
399:   PetscInt          *ranksNew;
400:   PetscBT            neighbors;
401:   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402:   PetscMPIInt        numProcs, proc, rank;
403:   PetscErrorCode     ierr;

410:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
411:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
412:   PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);
413:   PetscBTCreate(numProcs, &neighbors);
414:   PetscBTMemzero(numProcs, neighbors);
415:   /* Compute root-to-leaf process connectivity */
416:   PetscSectionGetChart(rootRankSection, &pStart, &pEnd);
417:   ISGetIndices(rootRanks, &nranks);
418:   for (p = pStart; p < pEnd; ++p) {
419:     PetscInt ndof, noff, n;

421:     PetscSectionGetDof(rootRankSection, p, &ndof);
422:     PetscSectionGetOffset(rootRankSection, p, &noff);
423:     for (n = 0; n < ndof; ++n) {PetscBTSet(neighbors, nranks[noff+n]);}
424:   }
425:   ISRestoreIndices(rootRanks, &nranks);
426:   /* Compute leaf-to-neighbor process connectivity */
427:   PetscSectionGetChart(leafRankSection, &pStart, &pEnd);
428:   ISGetIndices(leafRanks, &nranks);
429:   for (p = pStart; p < pEnd; ++p) {
430:     PetscInt ndof, noff, n;

432:     PetscSectionGetDof(leafRankSection, p, &ndof);
433:     PetscSectionGetOffset(leafRankSection, p, &noff);
434:     for (n = 0; n < ndof; ++n) {PetscBTSet(neighbors, nranks[noff+n]);}
435:   }
436:   ISRestoreIndices(leafRanks, &nranks);
437:   /* Compute leaf-to-root process connectivity */
438:   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439:   /* Calculate edges */
440:   PetscBTClear(neighbors, rank);
441:   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442:   PetscMalloc1(numNeighbors, &ranksNew);
443:   PetscMalloc1(numNeighbors, &localPointsNew);
444:   PetscMalloc1(numNeighbors, &remotePointsNew);
445:   for(proc = 0, n = 0; proc < numProcs; ++proc) {
446:     if (PetscBTLookup(neighbors, proc)) {
447:       ranksNew[n]              = proc;
448:       localPointsNew[n]        = proc;
449:       remotePointsNew[n].index = rank;
450:       remotePointsNew[n].rank  = proc;
451:       ++n;
452:     }
453:   }
454:   PetscBTDestroy(&neighbors);
455:   if (processRanks) {ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);}
456:   else              {PetscFree(ranksNew);}
457:   if (sfProcess) {
458:     PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);
459:     PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");
460:     PetscSFSetFromOptions(*sfProcess);
461:     PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);
462:   }
463:   return(0);
464: }

468: /*@
469:   DMPlexDistributeOwnership - Compute owner information for shared points. This essentially makes the one-sided point SF information two-sided.

471:   Collective on DM

473:   Input Parameter:
474: . dm - The DM

476:   Output Parameters:
477: + rootSection - The number of leaves for a given root point
478: . rootrank    - The rank of each edge into the root point
479: . leafSection - The number of processes sharing a given leaf point
480: - leafrank    - The rank of each process sharing a leaf point

482:   Level: developer

484: .seealso: DMPlexCreateOverlap()
485: @*/
486: PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487: {
488:   MPI_Comm        comm;
489:   PetscSF         sfPoint;
490:   const PetscInt *rootdegree;
491:   PetscInt       *myrank, *remoterank;
492:   PetscInt        pStart, pEnd, p, nedges;
493:   PetscMPIInt     rank;
494:   PetscErrorCode  ierr;

497:   PetscObjectGetComm((PetscObject) dm, &comm);
498:   MPI_Comm_rank(comm, &rank);
499:   DMPlexGetChart(dm, &pStart, &pEnd);
500:   DMGetPointSF(dm, &sfPoint);
501:   /* Compute number of leaves for each root */
502:   PetscObjectSetName((PetscObject) rootSection, "Root Section");
503:   PetscSectionSetChart(rootSection, pStart, pEnd);
504:   PetscSFComputeDegreeBegin(sfPoint, &rootdegree);
505:   PetscSFComputeDegreeEnd(sfPoint, &rootdegree);
506:   for (p = pStart; p < pEnd; ++p) {PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);}
507:   PetscSectionSetUp(rootSection);
508:   /* Gather rank of each leaf to root */
509:   PetscSectionGetStorageSize(rootSection, &nedges);
510:   PetscMalloc1(pEnd-pStart, &myrank);
511:   PetscMalloc1(nedges,  &remoterank);
512:   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513:   PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);
514:   PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);
515:   PetscFree(myrank);
516:   ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);
517:   /* Distribute remote ranks to leaves */
518:   PetscObjectSetName((PetscObject) leafSection, "Leaf Section");
519:   DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);
520:   return(0);
521: }
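
/* Example (editor's sketch, not part of plexdistribute.c): compute which rank owns each shared
   point. The two sections are created empty by the caller and filled by the routine; the two ISs
   are created by the routine and must be destroyed. Names below are illustrative only. */
static PetscErrorCode ExampleDistributeOwnership(DM dm)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}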

525: /*@C
526:   DMPlexCreateOverlap - Compute a DMLabel marking the points, and the ranks they must be sent to, needed to add the requested number of overlap levels.

528:   Collective on DM

530:   Input Parameters:
531: + dm          - The DM
532: . levels      - Number of overlap levels
533: . rootSection - The number of leaves for a given root point
534: . rootrank    - The rank of each edge into the root point
535: . leafSection - The number of processes sharing a given leaf point
536: - leafrank    - The rank of each process sharing a leaf point

538:   Output Parameter:
539: . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

541:   Level: developer

543: .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
544: @*/
545: PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
546: {
547:   MPI_Comm           comm;
548:   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
549:   PetscSF            sfPoint, sfProc;
550:   const PetscSFNode *remote;
551:   const PetscInt    *local;
552:   const PetscInt    *nrank, *rrank;
553:   PetscInt          *adj = NULL;
554:   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
555:   PetscMPIInt        rank, numProcs;
556:   PetscBool          useCone, useClosure, flg;
557:   PetscErrorCode     ierr;

560:   PetscObjectGetComm((PetscObject) dm, &comm);
561:   MPI_Comm_size(comm, &numProcs);
562:   MPI_Comm_rank(comm, &rank);
563:   DMGetPointSF(dm, &sfPoint);
564:   DMPlexGetChart(dm, &pStart, &pEnd);
565:   PetscSectionGetChart(leafSection, &sStart, &sEnd);
566:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
567:   DMLabelCreate("Overlap adjacency", &ovAdjByRank);
568:   /* Handle leaves: shared with the root point */
569:   for (l = 0; l < nleaves; ++l) {
570:     PetscInt adjSize = PETSC_DETERMINE, a;

572:     DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);
573:     for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);}
574:   }
575:   ISGetIndices(rootrank, &rrank);
576:   ISGetIndices(leafrank, &nrank);
577:   /* Handle roots */
578:   for (p = pStart; p < pEnd; ++p) {
579:     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

581:     if ((p >= sStart) && (p < sEnd)) {
582:       /* Some leaves share a root with other leaves on different processes */
583:       PetscSectionGetDof(leafSection, p, &neighbors);
584:       if (neighbors) {
585:         PetscSectionGetOffset(leafSection, p, &noff);
586:         DMPlexGetAdjacency(dm, p, &adjSize, &adj);
587:         for (n = 0; n < neighbors; ++n) {
588:           const PetscInt remoteRank = nrank[noff+n];

590:           if (remoteRank == rank) continue;
591:           for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);}
592:         }
593:       }
594:     }
595:     /* Roots are shared with leaves */
596:     PetscSectionGetDof(rootSection, p, &neighbors);
597:     if (!neighbors) continue;
598:     PetscSectionGetOffset(rootSection, p, &noff);
599:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
600:     for (n = 0; n < neighbors; ++n) {
601:       const PetscInt remoteRank = rrank[noff+n];

603:       if (remoteRank == rank) continue;
604:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);}
605:     }
606:   }
607:   PetscFree(adj);
608:   ISRestoreIndices(rootrank, &rrank);
609:   ISRestoreIndices(leafrank, &nrank);
610:   /* Add additional overlap levels */
611:   for (l = 1; l < levels; l++) {DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);}
612:   /* We require the closure in the overlap */
613:   DMPlexGetAdjacencyUseCone(dm, &useCone);
614:   DMPlexGetAdjacencyUseClosure(dm, &useClosure);
615:   if (useCone || !useClosure) {
616:     DMPlexPartitionLabelClosure(dm, ovAdjByRank);
617:   }
618:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);
619:   if (flg) {
620:     DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);
621:   }
622:   /* Make process SF and invert sender to receiver label */
623:   DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);
624:   DMLabelCreate("Overlap label", ovLabel);
625:   DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);
626:   /* Add owned points, except for shared local points */
627:   for (p = pStart; p < pEnd; ++p) {DMLabelSetValue(*ovLabel, p, rank);}
628:   for (l = 0; l < nleaves; ++l) {
629:     DMLabelClearValue(*ovLabel, local[l], rank);
630:     DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);
631:   }
632:   /* Clean up */
633:   DMLabelDestroy(&ovAdjByRank);
634:   PetscSFDestroy(&sfProc);
635:   return(0);
636: }
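
/* Example (editor's sketch, not part of plexdistribute.c): compute a one-level overlap label,
   feeding DMPlexCreateOverlap() the ownership information produced by DMPlexDistributeOwnership()
   above. The resulting label pairs each point with the rank it must be sent to. Names below are
   illustrative only. */
static PetscErrorCode ExampleCreateOverlapLabel(DM dm, DMLabel *ovLabel)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, ovLabel);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}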

640: PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
641: {
642:   MPI_Comm           comm;
643:   PetscMPIInt        rank, numProcs;
644:   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
645:   PetscInt          *pointDepths, *remoteDepths, *ilocal;
646:   PetscInt          *depthRecv, *depthShift, *depthIdx;
647:   PetscSFNode       *iremote;
648:   PetscSF            pointSF;
649:   const PetscInt    *sharedLocal;
650:   const PetscSFNode *overlapRemote, *sharedRemote;
651:   PetscErrorCode     ierr;

655:   PetscObjectGetComm((PetscObject)dm, &comm);
656:   MPI_Comm_rank(comm, &rank);
657:   MPI_Comm_size(comm, &numProcs);
658:   DMGetDimension(dm, &dim);

660:   /* Before building the migration SF we need to know the new stratum offsets */
661:   PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);
662:   PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);
663:   for (d=0; d<dim+1; d++) {
664:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
665:     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
666:   }
667:   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
668:   PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);
669:   PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);

671:   /* Count received points in each stratum and compute the internal strata shift */
672:   PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);
673:   for (d=0; d<dim+1; d++) depthRecv[d]=0;
674:   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
675:   depthShift[dim] = 0;
676:   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
677:   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
678:   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
679:   for (d=0; d<dim+1; d++) {
680:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
681:     depthIdx[d] = pStart + depthShift[d];
682:   }

684:   /* From the overlap SF, build an SF that describes the full overlap migration */
685:   DMPlexGetChart(dm, &pStart, &pEnd);
686:   newLeaves = pEnd - pStart + nleaves;
687:   PetscMalloc1(newLeaves, &ilocal);
688:   PetscMalloc1(newLeaves, &iremote);
689:   /* First map local points to themselves */
690:   for (d=0; d<dim+1; d++) {
691:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
692:     for (p=pStart; p<pEnd; p++) {
693:       point = p + depthShift[d];
694:       ilocal[point] = point;
695:       iremote[point].index = p;
696:       iremote[point].rank = rank;
697:       depthIdx[d]++;
698:     }
699:   }

701:   /* Add in the remote roots for currently shared points */
702:   DMGetPointSF(dm, &pointSF);
703:   PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);
704:   for (d=0; d<dim+1; d++) {
705:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
706:     for (p=0; p<numSharedPoints; p++) {
707:       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
708:         point = sharedLocal[p] + depthShift[d];
709:         iremote[point].index = sharedRemote[p].index;
710:         iremote[point].rank = sharedRemote[p].rank;
711:       }
712:     }
713:   }

715:   /* Now add the incoming overlap points */
716:   for (p=0; p<nleaves; p++) {
717:     point = depthIdx[remoteDepths[p]];
718:     ilocal[point] = point;
719:     iremote[point].index = overlapRemote[p].index;
720:     iremote[point].rank = overlapRemote[p].rank;
721:     depthIdx[remoteDepths[p]]++;
722:   }
723:   PetscFree2(pointDepths,remoteDepths);

725:   PetscSFCreate(comm, migrationSF);
726:   PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");
727:   PetscSFSetFromOptions(*migrationSF);
728:   DMPlexGetChart(dm, &pStart, &pEnd);
729:   PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);

731:   PetscFree3(depthRecv, depthShift, depthIdx);
732:   return(0);
733: }

737: /*@
738:   DMPlexStratifyMigrationSF - Rearrange the leaves of a point migration SF so that the DM built from it is stratified, i.e. its points are ordered by depth stratum.

740:   Input Parameters:
741: + dm          - The DM
742: - sf          - A star forest with non-ordered leaves, usually defining a DM point migration

744:   Output Parameter:
745: . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

747:   Level: developer

749: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
750: @*/
751: PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
752: {
753:   MPI_Comm           comm;
754:   PetscMPIInt        rank, numProcs;
755:   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
756:   PetscInt          *pointDepths, *remoteDepths, *ilocal;
757:   PetscInt          *depthRecv, *depthShift, *depthIdx;
758:   const PetscSFNode *iremote;
759:   PetscErrorCode     ierr;

763:   PetscObjectGetComm((PetscObject) dm, &comm);
764:   MPI_Comm_rank(comm, &rank);
765:   MPI_Comm_size(comm, &numProcs);
766:   DMPlexGetDepth(dm, &ldepth);
767:   MPI_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);
768:   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

770:   /* Before building the migration SF we need to know the new stratum offsets */
771:   PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);
772:   PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);
773:   for (d = 0; d < depth+1; ++d) {
774:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
775:     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
776:   }
777:   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
778:   PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);
779:   PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);
780:   /* Count received points in each stratum and compute the internal strata shift */
781:   PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);
782:   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
783:   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
784:   depthShift[depth] = 0;
785:   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
786:   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
787:   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
788:   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
789:   /* Derive a new local permutation based on stratified indices */
790:   PetscMalloc1(nleaves, &ilocal);
791:   for (p = 0; p < nleaves; ++p) {
792:     const PetscInt dep = remoteDepths[p];

794:     ilocal[p] = depthShift[dep] + depthIdx[dep];
795:     depthIdx[dep]++;
796:   }
797:   PetscSFCreate(comm, migrationSF);
798:   PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");
799:   PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);
800:   PetscFree2(pointDepths,remoteDepths);
801:   PetscFree3(depthRecv, depthShift, depthIdx);
802:   return(0);
803: }

807: /*@
808:   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

810:   Collective on DM

812:   Input Parameters:
813: + dm - The DMPlex object
814: . pointSF - The PetscSF describing the communication pattern
815: . originalSection - The PetscSection for existing data layout
816: - originalVec - The existing data

818:   Output Parameters:
819: + newSection - The PetscSection describing the new data layout
820: - newVec - The new data

822:   Level: developer

824: .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
825: @*/
826: PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
827: {
828:   PetscSF        fieldSF;
829:   PetscInt      *remoteOffsets, fieldSize;
830:   PetscScalar   *originalValues, *newValues;

834:   PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);
835:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

837:   PetscSectionGetStorageSize(newSection, &fieldSize);
838:   VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);
839:   VecSetType(newVec,dm->vectype);

841:   VecGetArray(originalVec, &originalValues);
842:   VecGetArray(newVec, &newValues);
843:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
844:   PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);
845:   PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);
846:   PetscSFDestroy(&fieldSF);
847:   VecRestoreArray(newVec, &newValues);
848:   VecRestoreArray(originalVec, &originalValues);
849:   PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);
850:   return(0);
851: }
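
/* Example (editor's sketch, not part of plexdistribute.c): redistribute a Vec laid out by
   originalSection over a point SF, in the same way DMPlexDistributeCoordinates() below moves the
   coordinate vector. The new section and vector are created empty here; DMPlexDistributeField()
   sets their layout, size, and type. Names below are illustrative only. */
static PetscErrorCode ExampleDistributeVec(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection *newSection, Vec *newVec)
{
  MPI_Comm       comm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, newSection);CHKERRQ(ierr);
  ierr = VecCreate(comm, newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, pointSF, originalSection, originalVec, *newSection, *newVec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}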

855: /*@
856:   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

858:   Collective on DM

860:   Input Parameters:
861: + dm - The DMPlex object
862: . pointSF - The PetscSF describing the communication pattern
863: . originalSection - The PetscSection for existing data layout
864: - originalIS - The existing data

866:   Output Parameters:
867: + newSection - The PetscSection describing the new data layout
868: - newIS - The new data

870:   Level: developer

872: .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
873: @*/
874: PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
875: {
876:   PetscSF         fieldSF;
877:   PetscInt       *newValues, *remoteOffsets, fieldSize;
878:   const PetscInt *originalValues;
879:   PetscErrorCode  ierr;

882:   PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);
883:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

885:   PetscSectionGetStorageSize(newSection, &fieldSize);
886:   PetscMalloc1(fieldSize, &newValues);

888:   ISGetIndices(originalIS, &originalValues);
889:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
890:   PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);
891:   PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);
892:   PetscSFDestroy(&fieldSF);
893:   ISRestoreIndices(originalIS, &originalValues);
894:   ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);
895:   PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);
896:   return(0);
897: }

901: /*@
902:   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

904:   Collective on DM

906:   Input Parameters:
907: + dm - The DMPlex object
908: . pointSF - The PetscSF describing the communication pattern
909: . originalSection - The PetscSection for existing data layout
910: . datatype - The type of data
911: - originalData - The existing data

913:   Output Parameters:
914: + newSection - The PetscSection describing the new data layout
915: - newData - The new data

917:   Level: developer

919: .seealso: DMPlexDistribute(), DMPlexDistributeField()
920: @*/
921: PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
922: {
923:   PetscSF        fieldSF;
924:   PetscInt      *remoteOffsets, fieldSize;
925:   PetscMPIInt    dataSize;

929:   PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);
930:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

932:   PetscSectionGetStorageSize(newSection, &fieldSize);
933:   MPI_Type_size(datatype, &dataSize);
934:   PetscMalloc(fieldSize * dataSize, newData);

936:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
937:   PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);
938:   PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);
939:   PetscSFDestroy(&fieldSF);
940:   PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);
941:   return(0);
942: }
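
/* Example (editor's sketch, not part of plexdistribute.c): redistribute a raw PetscInt array laid
   out by originalSection. The output array is allocated by the routine and must be freed with
   PetscFree(). Names below are illustrative only. */
static PetscErrorCode ExampleDistributeIntData(DM dm, PetscSF pointSF, PetscSection originalSection, PetscInt *originalData, PetscSection *newSection, PetscInt **newData)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, pointSF, originalSection, MPIU_INT, originalData, *newSection, (void **) newData);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}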

946: PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
947: {
948:   DM_Plex               *mesh  = (DM_Plex*) dm->data;
949:   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
950:   MPI_Comm               comm;
951:   PetscSF                coneSF;
952:   PetscSection           originalConeSection, newConeSection;
953:   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
954:   PetscBool              flg;
955:   PetscErrorCode         ierr;

960:   PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);

962:   /* Distribute cone section */
963:   PetscObjectGetComm((PetscObject)dm, &comm);
964:   DMPlexGetConeSection(dm, &originalConeSection);
965:   DMPlexGetConeSection(dmParallel, &newConeSection);
966:   PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);
967:   DMSetUp(dmParallel);
968:   {
969:     PetscInt pStart, pEnd, p;

971:     PetscSectionGetChart(newConeSection, &pStart, &pEnd);
972:     for (p = pStart; p < pEnd; ++p) {
973:       PetscInt coneSize;
974:       PetscSectionGetDof(newConeSection, p, &coneSize);
975:       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
976:     }
977:   }
978:   /* Communicate and renumber cones */
979:   PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);
980:   DMPlexGetCones(dm, &cones);
981:   DMPlexGetCones(dmParallel, &newCones);
982:   PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
983:   PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
984:   PetscSectionGetStorageSize(newConeSection, &newConesSize);
985:   ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);
986: #if PETSC_USE_DEBUG
987:   {
988:     PetscInt  p;
989:     PetscBool valid = PETSC_TRUE;
990:     for (p = 0; p < newConesSize; ++p) {
991:       if (newCones[p] < 0) {valid = PETSC_FALSE; PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);}
992:     }
993:     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
994:   }
995: #endif
996:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);
997:   if (flg) {
998:     PetscPrintf(comm, "Serial Cone Section:\n");
999:     PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);
1000:     PetscPrintf(comm, "Parallel Cone Section:\n");
1001:     PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);
1002:     PetscSFView(coneSF, NULL);
1003:   }
1004:   DMPlexGetConeOrientations(dm, &cones);
1005:   DMPlexGetConeOrientations(dmParallel, &newCones);
1006:   PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
1007:   PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
1008:   PetscSFDestroy(&coneSF);
1009:   PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);
1010:   /* Create supports and stratify sieve */
1011:   {
1012:     PetscInt pStart, pEnd;

1014:     PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);
1015:     PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);
1016:   }
1017:   DMPlexSymmetrize(dmParallel);
1018:   DMPlexStratify(dmParallel);
1019:   pmesh->useCone    = mesh->useCone;
1020:   pmesh->useClosure = mesh->useClosure;
1021:   return(0);
1022: }

1026: PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1027: {
1028:   MPI_Comm         comm;
1029:   PetscSection     originalCoordSection, newCoordSection;
1030:   Vec              originalCoordinates, newCoordinates;
1031:   PetscInt         bs;
1032:   const char      *name;
1033:   const PetscReal *maxCell, *L;
1034:   const DMBoundaryType *bd;
1035:   PetscErrorCode   ierr;


1041:   PetscObjectGetComm((PetscObject)dm, &comm);
1042:   DMGetCoordinateSection(dm, &originalCoordSection);
1043:   DMGetCoordinateSection(dmParallel, &newCoordSection);
1044:   DMGetCoordinatesLocal(dm, &originalCoordinates);
1045:   if (originalCoordinates) {
1046:     VecCreate(comm, &newCoordinates);
1047:     PetscObjectGetName((PetscObject) originalCoordinates, &name);
1048:     PetscObjectSetName((PetscObject) newCoordinates, name);

1050:     DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);
1051:     DMSetCoordinatesLocal(dmParallel, newCoordinates);
1052:     VecGetBlockSize(originalCoordinates, &bs);
1053:     VecSetBlockSize(newCoordinates, bs);
1054:     VecDestroy(&newCoordinates);
1055:   }
1056:   DMGetPeriodicity(dm, &maxCell, &L, &bd);
1057:   if (L) {DMSetPeriodicity(dmParallel, maxCell, L, bd);}
1058:   return(0);
1059: }

1063: /* Here we are assuming that process 0 always has everything */
1064: PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1065: {
1066:   MPI_Comm       comm;
1067:   PetscMPIInt    rank;
1068:   PetscInt       numLabels, numLocalLabels, l;
1069:   PetscBool      hasLabels = PETSC_FALSE;

1075:   PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);
1076:   PetscObjectGetComm((PetscObject)dm, &comm);
1077:   MPI_Comm_rank(comm, &rank);

1079:   /* Everyone must have either the same number of labels, or none */
1080:   DMPlexGetNumLabels(dm, &numLocalLabels);
1081:   numLabels = numLocalLabels;
1082:   MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);
1083:   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1084:   for (l = numLabels-1; l >= 0; --l) {
1085:     DMLabel     label = NULL, labelNew = NULL;
1086:     PetscBool   isdepth;

1088:     if (hasLabels) {
1089:       DMPlexGetLabelByNum(dm, l, &label);
1090:       /* Skip "depth" because it is recreated */
1091:       PetscStrcmp(label->name, "depth", &isdepth);
1092:     }
1093:     MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);
1094:     if (isdepth) continue;
1095:     DMLabelDistribute(label, migrationSF, &labelNew);
1096:     DMPlexAddLabel(dmParallel, labelNew);
1097:   }
1098:   PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);
1099:   return(0);
1100: }

1104: PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1105: {
1106:   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1107:   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1108:   MPI_Comm        comm;
1109:   const PetscInt *gpoints;
1110:   PetscInt        dim, depth, n, d;
1111:   PetscErrorCode  ierr;


1117:   PetscObjectGetComm((PetscObject)dm, &comm);
1118:   DMGetDimension(dm, &dim);

1120:   /* Setup hybrid structure */
1121:   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1122:   MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);
1123:   ISLocalToGlobalMappingGetSize(renumbering, &n);
1124:   ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);
1125:   DMPlexGetDepth(dm, &depth);
1126:   for (d = 0; d <= dim; ++d) {
1127:     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

1129:     if (pmax < 0) continue;
1130:     DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);
1131:     DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);
1132:     MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);
1133:     for (p = 0; p < n; ++p) {
1134:       const PetscInt point = gpoints[p];

1136:       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1137:     }
1138:     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1139:     else            pmesh->hybridPointMax[d] = -1;
1140:   }
1141:   ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);
1142:   return(0);
1143: }

1147: PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1148: {
1149:   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1150:   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1151:   MPI_Comm        comm;
1152:   DM              refTree;
1153:   PetscSection    origParentSection, newParentSection;
1154:   PetscInt        *origParents, *origChildIDs;
1155:   PetscBool       flg;
1156:   PetscErrorCode  ierr;

1161:   PetscObjectGetComm((PetscObject)dm, &comm);

1163:   /* Set up tree */
1164:   DMPlexGetReferenceTree(dm,&refTree);
1165:   DMPlexSetReferenceTree(dmParallel,refTree);
1166:   DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);
1167:   if (origParentSection) {
1168:     PetscInt        pStart, pEnd;
1169:     PetscInt        *newParents, *newChildIDs;
1170:     PetscInt        *remoteOffsetsParents, newParentSize;
1171:     PetscSF         parentSF;

1173:     DMPlexGetChart(dmParallel, &pStart, &pEnd);
1174:     PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);
1175:     PetscSectionSetChart(newParentSection,pStart,pEnd);
1176:     PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);
1177:     PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);
1178:     PetscSectionGetStorageSize(newParentSection,&newParentSize);
1179:     PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);
1180:     PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);
1181:     PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);
1182:     PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);
1183:     PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);
1184:     ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);
1185: #if PETSC_USE_DEBUG
1186:     {
1187:       PetscInt  p;
1188:       PetscBool valid = PETSC_TRUE;
1189:       for (p = 0; p < newParentSize; ++p) {
1190:         if (newParents[p] < 0) {valid = PETSC_FALSE; PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);}
1191:       }
1192:       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1193:     }
1194: #endif
1195:     PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);
1196:     if (flg) {
1197:       PetscPrintf(comm, "Serial Parent Section: \n");
1198:       PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);
1199:       PetscPrintf(comm, "Parallel Parent Section: \n");
1200:       PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);
1201:       PetscSFView(parentSF, NULL);
1202:     }
1203:     DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);
1204:     PetscSectionDestroy(&newParentSection);
1205:     PetscFree2(newParents,newChildIDs);
1206:     PetscSFDestroy(&parentSF);
1207:   }
1208:   pmesh->useAnchors = mesh->useAnchors;
1209:   return(0);
1210: }

1214: PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1215: {
1216:   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1217:   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1218:   PetscMPIInt            rank, numProcs;
1219:   MPI_Comm               comm;
1220:   PetscErrorCode         ierr;


1226:   /* Create point SF for parallel mesh */
1227:   PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);
1228:   PetscObjectGetComm((PetscObject)dm, &comm);
1229:   MPI_Comm_rank(comm, &rank);
1230:   MPI_Comm_size(comm, &numProcs);
1231:   {
1232:     const PetscInt *leaves;
1233:     PetscSFNode    *remotePoints, *rowners, *lowners;
1234:     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1235:     PetscInt        pStart, pEnd;

1237:     DMPlexGetChart(dmParallel, &pStart, &pEnd);
1238:     PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);
1239:     PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);
1240:     for (p=0; p<numRoots; p++) {
1241:       rowners[p].rank  = -1;
1242:       rowners[p].index = -1;
1243:     }
1244:     PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);
1245:     PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);
1246:     for (p = 0; p < numLeaves; ++p) {
1247:       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1248:         lowners[p].rank  = rank;
1249:         lowners[p].index = leaves ? leaves[p] : p;
1250:       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1251:         lowners[p].rank  = -2;
1252:         lowners[p].index = -2;
1253:       }
1254:     }
1255:     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, so flag them so that MAXLOC does not use them */
1256:       rowners[p].rank  = -3;
1257:       rowners[p].index = -3;
1258:     }
1259:     PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
1260:     PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
1261:     PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);
1262:     PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);
1263:     for (p = 0; p < numLeaves; ++p) {
1264:       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1265:       if (lowners[p].rank != rank) ++numGhostPoints;
1266:     }
1267:     PetscMalloc1(numGhostPoints, &ghostPoints);
1268:     PetscMalloc1(numGhostPoints, &remotePoints);
1269:     for (p = 0, gp = 0; p < numLeaves; ++p) {
1270:       if (lowners[p].rank != rank) {
1271:         ghostPoints[gp]        = leaves ? leaves[p] : p;
1272:         remotePoints[gp].rank  = lowners[p].rank;
1273:         remotePoints[gp].index = lowners[p].index;
1274:         ++gp;
1275:       }
1276:     }
1277:     PetscFree2(rowners,lowners);
1278:     PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1279:     PetscSFSetFromOptions((dmParallel)->sf);
1280:   }
1281:   pmesh->useCone    = mesh->useCone;
1282:   pmesh->useClosure = mesh->useClosure;
1283:   PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);
1284:   return(0);
1285: }

1289: /*@C
1290:   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

1292:   Input Parameters:
1293: + dm          - The source DMPlex object
1294: . migrationSF - The star forest that describes the parallel point remapping
1295: - ownership   - Flag causing a vote to determine point ownership

1297:   Output Parameter:
1298: . pointSF     - The star forest describing the point overlap in the remapped DM

1300:   Level: developer

1302: .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1303: @*/
1304: PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1305: {
1306:   PetscMPIInt        rank;
1307:   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1308:   PetscInt          *pointLocal;
1309:   const PetscInt    *leaves;
1310:   const PetscSFNode *roots;
1311:   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1312:   PetscErrorCode     ierr;

1316:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);

1318:   PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);
1319:   PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);
1320:   if (ownership) {
1321:     /* Point ownership vote: the process with the highest rank owns shared points */
1322:     for (p = 0; p < nleaves; ++p) {
1323:       /* Either put in a bid or we know we own it */
1324:       leafNodes[p].rank  = rank;
1325:       leafNodes[p].index = p;
1326:     }
1327:     for (p = 0; p < nroots; p++) {
1328:       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1329:       rootNodes[p].rank  = -3;
1330:       rootNodes[p].index = -3;
1331:     }
1332:     PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);
1333:     PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);
1334:   } else {
1335:     for (p = 0; p < nroots; p++) {
1336:       rootNodes[p].index = -1;
1337:       rootNodes[p].rank = rank;
1338:     };
1339:     for (p = 0; p < nleaves; p++) {
1340:       /* Write new local id into old location */
1341:       if (roots[p].rank == rank) {
1342:         rootNodes[roots[p].index].index = leaves[p];
1343:       }
1344:     }
1345:   }
1346:   PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);
1347:   PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);

1349:   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1350:   PetscMalloc1(npointLeaves, &pointLocal);
1351:   PetscMalloc1(npointLeaves, &pointRemote);
1352:   for (idx = 0, p = 0; p < nleaves; p++) {
1353:     if (leafNodes[p].rank != rank) {
1354:       pointLocal[idx] = p;
1355:       pointRemote[idx] = leafNodes[p];
1356:       idx++;
1357:     }
1358:   }
1359:   PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);
1360:   PetscSFSetFromOptions(*pointSF);
1361:   PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);
1362:   PetscFree2(rootNodes, leafNodes);
1363:   return(0);
1364: }
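
/* Example (editor's sketch, not part of plexdistribute.c): derive the point SF of a migrated DM
   from the migration SF and attach it with DMSetPointSF(). This assumes dmParallel and sfMigration
   came from an earlier partitioning/migration step; names below are illustrative only. */
static PetscErrorCode ExampleAttachPointSF(DM dmParallel, PetscSF sfMigration)
{
  PetscSF        pointSF;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexCreatePointSF(dmParallel, sfMigration, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}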

1368: /*@C
1369:   DMPlexMigrate  - Migrates internal DM data over the supplied star forest

1371:   Input Parameters:
1372: + dm       - The source DMPlex object
1373: - sf       - The star forest communication context describing the migration pattern

1375:   Output Parameter:
1376: . targetDM - The target DMPlex object

1378:   Level: intermediate

1380: .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1381: @*/
1382: PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1383: {
1384:   MPI_Comm               comm;
1385:   PetscInt               dim, nroots;
1386:   PetscSF                sfPoint;
1387:   ISLocalToGlobalMapping ltogMigration;
1388:   PetscBool              flg;
1389:   PetscErrorCode         ierr;

1393:   PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);
1394:   PetscObjectGetComm((PetscObject) dm, &comm);
1395:   DMGetDimension(dm, &dim);
1396:   DMSetDimension(targetDM, dim);

1398:   /* Check for a one-to-all distribution pattern */
1399:   DMGetPointSF(dm, &sfPoint);
1400:   PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
1401:   if (nroots >= 0) {
1402:     IS                     isOriginal;
1403:     ISLocalToGlobalMapping ltogOriginal;
1404:     PetscInt               n, size, nleaves, conesSize;
1405:     PetscInt              *numbering_orig, *numbering_new, *cones;
1406:     PetscSection           coneSection;
1407:     /* Get the original point numbering */
1408:     DMPlexCreatePointNumbering(dm, &isOriginal);
1409:     ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);
1410:     ISLocalToGlobalMappingGetSize(ltogOriginal, &size);
1411:     ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);
1412:     /* Convert to positive global numbers */
1413:     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1414:     /* Derive the new local-to-global mapping from the old one */
1415:     PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);
1416:     PetscMalloc1(nleaves, &numbering_new);
1417:     PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);
1418:     PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);
1419:     ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);
1420:     ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);
1421:     /* Convert cones to global numbering before migrating them */
1422:     DMPlexGetConeSection(dm, &coneSection);
1423:     PetscSectionGetStorageSize(coneSection, &conesSize);
1424:     DMPlexGetCones(dm, &cones);
1425:     ISLocalToGlobalMappingApplyBlock(ltogOriginal, conesSize, cones, cones);
1426:     ISDestroy(&isOriginal);
1427:     ISLocalToGlobalMappingDestroy(&ltogOriginal);
1428:   } else {
1429:     /* One-to-all distribution pattern: We can derive LToG from SF */
1430:     ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);
1431:   }
1432:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);
1433:   if (flg) {
1434:     PetscPrintf(comm, "Point renumbering for DM migration:\n");
1435:     ISLocalToGlobalMappingView(ltogMigration, NULL);
1436:   }
1437:   /* Migrate DM data to target DM */
1438:   DMPlexDistributeCones(dm, sf, ltogMigration, targetDM);
1439:   DMPlexDistributeCoordinates(dm, sf, targetDM);
1440:   DMPlexDistributeLabels(dm, sf, targetDM);
1441:   DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);
1442:   DMPlexDistributeSetupTree(dm, sf, ltogMigration, targetDM);
1443:   ISLocalToGlobalMappingDestroy(&ltogMigration);
1444:   PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);
1445:   return(0);
1446: }
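
/* Example (editor's sketch, not part of plexdistribute.c): migrate a mesh over a given migration
   SF into a freshly created DMPlex, as the distribution path in this file does. Names below are
   illustrative only. */
static PetscErrorCode ExampleMigrate(DM dm, PetscSF sfMigration, DM *dmMigrated)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(PetscObjectComm((PetscObject) dm), dmMigrated);CHKERRQ(ierr);
  ierr = DMSetType(*dmMigrated, DMPLEX);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmMigrated);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}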

1450: /*@C
1451:   DMPlexDistribute - Distributes the mesh and any associated sections.

1453:   Collective on DM

1455:   Input Parameters:
1456: + dm  - The original DMPlex object
1457: - overlap - The overlap of partitions, 0 is the default

1459:   Output Parameters:
1460: + sf - The PetscSF used for point distribution (pass NULL if it is not needed)
1461: - dmParallel - The distributed DMPlex object, or NULL

1463:   Note: If the mesh was not distributed (e.g. the communicator has a single rank), dmParallel is set to NULL.

1465:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1466:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1467:   representation on the mesh.

1469:   Level: intermediate

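  Example usage (a minimal sketch; dmDist is a placeholder name and error checking is omitted):
$    DM dmDist = NULL;
$
$    DMPlexDistribute(dm, 0, NULL, &dmDist);
$    if (dmDist) {
$      DMDestroy(&dm);
$      dm = dmDist;
$    }
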
1471: .keywords: mesh, elements
1472: .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1473: @*/
1474: PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1475: {
1476:   MPI_Comm               comm;
1477:   PetscPartitioner       partitioner;
1478:   IS                     cellPart;
1479:   PetscSection           cellPartSection;
1480:   DM                     dmCoord;
1481:   DMLabel                lblPartition, lblMigration;
1482:   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1483:   PetscBool              flg;
1484:   PetscMPIInt            rank, numProcs, p;
1485:   PetscErrorCode         ierr;


1492:   PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);
1493:   PetscObjectGetComm((PetscObject)dm,&comm);
1494:   MPI_Comm_rank(comm, &rank);
1495:   MPI_Comm_size(comm, &numProcs);

1497:   *dmParallel = NULL;
1498:   if (numProcs == 1) return(0);

1500:   /* Create cell partition */
1501:   PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);
1502:   PetscSectionCreate(comm, &cellPartSection);
1503:   DMPlexGetPartitioner(dm, &partitioner);
1504:   PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);
1505:   {
1506:     /* Convert partition to DMLabel */
1507:     PetscInt proc, pStart, pEnd, npoints, poffset;
1508:     const PetscInt *points;
1509:     DMLabelCreate("Point Partition", &lblPartition);
1510:     ISGetIndices(cellPart, &points);
1511:     PetscSectionGetChart(cellPartSection, &pStart, &pEnd);
1512:     for (proc = pStart; proc < pEnd; proc++) {
1513:       PetscSectionGetDof(cellPartSection, proc, &npoints);
1514:       PetscSectionGetOffset(cellPartSection, proc, &poffset);
1515:       for (p = poffset; p < poffset+npoints; p++) {
1516:         DMLabelSetValue(lblPartition, points[p], proc);
1517:       }
1518:     }
1519:     ISRestoreIndices(cellPart, &points);
1520:   }
1521:   DMPlexPartitionLabelClosure(dm, lblPartition);
1522:   {
1523:     /* Build a global process SF */
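    /* Leaf p on this rank points to root slot <rank> on rank p, so every pair of ranks is connected;
       DMPlexPartitionLabelInvert() below uses this complete process graph to exchange partition points */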
1524:     PetscSFNode *remoteProc;
1525:     PetscMalloc1(numProcs, &remoteProc);
1526:     for (p = 0; p < numProcs; ++p) {
1527:       remoteProc[p].rank  = p;
1528:       remoteProc[p].index = rank;
1529:     }
1530:     PetscSFCreate(comm, &sfProcess);
1531:     PetscObjectSetName((PetscObject) sfProcess, "Process SF");
1532:     PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);
1533:   }
1534:   DMLabelCreate("Point migration", &lblMigration);
1535:   DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);
1536:   DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);
1537:   /* Stratify the SF in case we are migrating an already parallel plex */
1538:   DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);
1539:   PetscSFDestroy(&sfMigration);
1540:   sfMigration = sfStratified;
1541:   PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);
1542:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);
1543:   if (flg) {
1544:     DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);
1545:     PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);
1546:   }

1548:   /* Create non-overlapping parallel DM and migrate internal data */
1549:   DMPlexCreate(comm, dmParallel);
1550:   PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");
1551:   DMPlexMigrate(dm, sfMigration, *dmParallel);

1553:   /* Build the point SF without overlap */
1554:   DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);
1555:   DMSetPointSF(*dmParallel, sfPoint);
1556:   DMGetCoordinateDM(*dmParallel, &dmCoord);
1557:   if (dmCoord) {DMSetPointSF(dmCoord, sfPoint);}
1558:   if (flg) {PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);}

1560:   if (overlap > 0) {
1561:     DM                 dmOverlap;
1562:     PetscInt           nroots, nleaves;
1563:     PetscSFNode       *newRemote;
1564:     const PetscSFNode *oldRemote;
1565:     PetscSF            sfOverlap, sfOverlapPoint;
1566:     /* Add the partition overlap to the distributed DM */
1567:     DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);
1568:     DMDestroy(dmParallel);
1569:     *dmParallel = dmOverlap;
1570:     if (flg) {
1571:       PetscPrintf(comm, "Overlap Migration SF:\n");
1572:       PetscSFView(sfOverlap, NULL);
1573:     }

1575:     /* Re-map the migration SF to establish the full migration pattern */
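    /* The remote entries of sfMigration are indexed by the pre-overlap parallel points, which are exactly the
       roots of sfOverlap, so broadcasting them (as rank/offset pairs, hence MPIU_2INT) through sfOverlap gives
       each overlapping point its location in the original mesh */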
1576:     PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);
1577:     PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);
1578:     PetscMalloc1(nleaves, &newRemote);
1579:     PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);
1580:     PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);
1581:     PetscSFCreate(comm, &sfOverlapPoint);
1582:     PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);
1583:     PetscSFDestroy(&sfOverlap);
1584:     PetscSFDestroy(&sfMigration);
1585:     sfMigration = sfOverlapPoint;
1586:   }
1587:   /* Cleanup Partition */
1588:   PetscSFDestroy(&sfProcess);
1589:   DMLabelDestroy(&lblPartition);
1590:   DMLabelDestroy(&lblMigration);
1591:   PetscSectionDestroy(&cellPartSection);
1592:   ISDestroy(&cellPart);
1593:   /* Copy BC */
1594:   DMPlexCopyBoundary(dm, *dmParallel);
1595:   /* Create sfNatural */
1596:   if (dm->useNatural) {
1597:     PetscSection section;

1599:     DMGetDefaultSection(dm, &section);
1600:     DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);
1601:   }
1602:   /* Cleanup */
1603:   if (sf) {*sf = sfMigration;}
1604:   else    {PetscSFDestroy(&sfMigration);}
1605:   PetscSFDestroy(&sfPoint);
1606:   PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);
1607:   return(0);
1608: }

1612: /*@C
1613:   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

1615:   Collective on DM

1617:   Input Parameters:
1618: + dm  - The non-overlapping distributed DMPlex object
1619: - overlap - The overlap of partitions, 0 is the default

1621:   Output Parameters:
1622: + sf - The PetscSF used for point distribution (pass NULL if it is not needed)
1623: - dmOverlap - The overlapping distributed DMPlex object, or NULL

1625:   Note: If the mesh was not distributed, dmOverlap is set to NULL.

1627:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1628:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1629:   representation on the mesh.

1631:   Level: intermediate

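  Example usage (a minimal sketch; dmOver is a placeholder name and error checking is omitted):
$    DM dmOver = NULL;
$
$    DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);
$    if (dmOver) {
$      DMDestroy(&dm);
$      dm = dmOver;
$    }
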
1633: .keywords: mesh, elements
1634: .seealso: DMPlexCreate(), DMPlexDistribute(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1635: @*/
1636: PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1637: {
1638:   MPI_Comm               comm;
1639:   PetscMPIInt            rank;
1640:   PetscSection           rootSection, leafSection;
1641:   IS                     rootrank, leafrank;
1642:   DM                     dmCoord;
1643:   DMLabel                lblOverlap;
1644:   PetscSF                sfOverlap, sfStratified, sfPoint;
1645:   PetscErrorCode         ierr;


1652:   PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);
1653:   PetscObjectGetComm((PetscObject)dm,&comm);
1654:   MPI_Comm_rank(comm, &rank);

1656:   /* Compute point overlap with neighbouring processes on the distributed DM */
1657:   PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);
1658:   PetscSectionCreate(comm, &rootSection);
1659:   PetscSectionCreate(comm, &leafSection);
1660:   DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);
1661:   DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);
1662:   /* Convert overlap label to stratified migration SF */
1663:   DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);
1664:   DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);
1665:   PetscSFDestroy(&sfOverlap);
1666:   sfOverlap = sfStratified;
1667:   PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");
1668:   PetscSFSetFromOptions(sfOverlap);

1670:   PetscSectionDestroy(&rootSection);
1671:   PetscSectionDestroy(&leafSection);
1672:   ISDestroy(&rootrank);
1673:   ISDestroy(&leafrank);
1674:   PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);

1676:   /* Build the overlapping DM */
1677:   DMPlexCreate(comm, dmOverlap);
1678:   PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");
1679:   DMPlexMigrate(dm, sfOverlap, *dmOverlap);
1680:   /* Build the new point SF */
1681:   DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);
1682:   DMSetPointSF(*dmOverlap, sfPoint);
1683:   DMGetCoordinateDM(*dmOverlap, &dmCoord);
1684:   if (dmCoord) {DMSetPointSF(dmCoord, sfPoint);}
1685:   PetscSFDestroy(&sfPoint);
1686:   /* Cleanup overlap partition */
1687:   DMLabelDestroy(&lblOverlap);
1688:   if (sf) {*sf = sfOverlap;}
1689:   else    {PetscSFDestroy(&sfOverlap);}
1690:   PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);
1691:   return(0);
1692: }