Actual source code: plexdistribute.c

petsc-master 2014-10-20
  1: #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
  2: #include <petscsf.h>

  6: /*@
  7:   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  9:   Input Parameters:
 10: + dm      - The DM object
 11: - useCone - Flag to use the cone first

 13:   Level: intermediate

 15:   Notes:
 16: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 17: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 18: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 20: .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
 21: @*/
 22: PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
 23: {
 24:   DM_Plex *mesh = (DM_Plex *) dm->data;

 28:   mesh->useCone = useCone;
 29:   return(0);
 30: }
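
/*
  A minimal usage sketch (hypothetical helper, assuming "dm" is an existing DMPlex): select the
  FVM-style adjacency from the Notes above, i.e. useCone = PETSC_TRUE and useClosure = PETSC_FALSE,
  before preallocation or distribution.
*/
static PetscErrorCode ExampleUseFVMAdjacency(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);     /* adjacency looks at the cone first */
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr); /* and then the support, not the full closure */
  PetscFunctionReturn(0);
}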

 34: /*@
 35:   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

 37:   Input Parameter:
 38: . dm      - The DM object

 40:   Output Parameter:
 41: . useCone - Flag to use the cone first

 43:   Level: intermediate

 45:   Notes:
 46: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 47: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 48: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 50: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
 51: @*/
 52: PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
 53: {
 54:   DM_Plex *mesh = (DM_Plex *) dm->data;

 59:   *useCone = mesh->useCone;
 60:   return(0);
 61: }

 65: /*@
 66:   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

 68:   Input Parameters:
 69: + dm      - The DM object
 70: - useClosure - Flag to use the closure

 72:   Level: intermediate

 74:   Notes:
 75: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
 76: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
 77: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

 79: .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
 80: @*/
 81: PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
 82: {
 83:   DM_Plex *mesh = (DM_Plex *) dm->data;

 87:   mesh->useClosure = useClosure;
 88:   return(0);
 89: }

 93: /*@
 94:   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

 96:   Input Parameter:
 97: . dm      - The DM object

 99:   Output Parameter:
100: . useClosure - Flag to use the closure

102:   Level: intermediate

104:   Notes:
105: $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106: $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107: $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

109: .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110: @*/
111: PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112: {
113:   DM_Plex *mesh = (DM_Plex *) dm->data;

118:   *useClosure = mesh->useClosure;
119:   return(0);
120: }
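
/*
  A minimal usage sketch (hypothetical helper, assuming "dm" is an existing DMPlex): query the
  current adjacency flags and report which combination from the Notes above is active.
*/
static PetscErrorCode ExampleReportAdjacency(DM dm)
{
  PetscBool      useCone, useClosure;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "useCone %d useClosure %d\n", (int) useCone, (int) useClosure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}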

124: /*@
125:   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

127:   Input Parameters:
128: + dm      - The DM object
129: - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

131:   Level: intermediate

133: .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134: @*/
135: PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136: {
137:   DM_Plex *mesh = (DM_Plex *) dm->data;

141:   mesh->useAnchors = useAnchors;
142:   return(0);
143: }

147: /*@
148:   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

150:   Input Parameter:
151: . dm      - The DM object

153:   Output Parameter:
154: . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

156:   Level: intermediate

158: .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159: @*/
160: PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161: {
162:   DM_Plex *mesh = (DM_Plex *) dm->data;

167:   *useAnchors = mesh->useAnchors;
168:   return(0);
169: }
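
/*
  A minimal usage sketch (hypothetical helper, assuming "dm" is an existing DMPlex with anchors
  set by DMPlexSetAnchors()): turn on anchor-aware adjacency so that constrained points are
  replaced by their anchors in DMPlexGetAdjacency(), then read the flag back.
*/
static PetscErrorCode ExampleUseAnchorAdjacency(DM dm)
{
  PetscBool      useAnchors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "useAnchors %d\n", (int) useAnchors);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}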

173: static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174: {
175:   const PetscInt *cone = NULL;
176:   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177:   PetscErrorCode  ierr;

180:   DMPlexGetConeSize(dm, p, &coneSize);
181:   DMPlexGetCone(dm, p, &cone);
182:   for (c = 0; c <= coneSize; ++c) {
183:     const PetscInt  point   = !c ? p : cone[c-1];
184:     const PetscInt *support = NULL;
185:     PetscInt        supportSize, s, q;

187:     DMPlexGetSupportSize(dm, point, &supportSize);
188:     DMPlexGetSupport(dm, point, &support);
189:     for (s = 0; s < supportSize; ++s) {
190:       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191:         if (support[s] == adj[q]) break;
192:       }
193:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194:     }
195:   }
196:   *adjSize = numAdj;
197:   return(0);
198: }

202: static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203: {
204:   const PetscInt *support = NULL;
205:   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206:   PetscErrorCode  ierr;

209:   DMPlexGetSupportSize(dm, p, &supportSize);
210:   DMPlexGetSupport(dm, p, &support);
211:   for (s = 0; s <= supportSize; ++s) {
212:     const PetscInt  point = !s ? p : support[s-1];
213:     const PetscInt *cone  = NULL;
214:     PetscInt        coneSize, c, q;

216:     DMPlexGetConeSize(dm, point, &coneSize);
217:     DMPlexGetCone(dm, point, &cone);
218:     for (c = 0; c < coneSize; ++c) {
219:       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220:         if (cone[c] == adj[q]) break;
221:       }
222:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223:     }
224:   }
225:   *adjSize = numAdj;
226:   return(0);
227: }

231: static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232: {
233:   PetscInt      *star = NULL;
234:   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;

238:   DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);
239:   for (s = 0; s < starSize*2; s += 2) {
240:     const PetscInt *closure = NULL;
241:     PetscInt        closureSize, c, q;

243:     DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
244:     for (c = 0; c < closureSize*2; c += 2) {
245:       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246:         if (closure[c] == adj[q]) break;
247:       }
248:       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249:     }
250:     DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
251:   }
252:   DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);
253:   *adjSize = numAdj;
254:   return(0);
255: }

259: PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260: {
261:   static PetscInt asiz = 0;
262:   PetscInt maxAnchors = 1;
263:   PetscInt aStart = -1, aEnd = -1;
264:   PetscInt maxAdjSize;
265:   PetscSection aSec = NULL;
266:   IS aIS = NULL;
267:   const PetscInt *anchors;
268:   PetscErrorCode  ierr;

271:   if (useAnchors) {
272:     DMPlexGetAnchors(dm,&aSec,&aIS);
273:     if (aSec) {
274:       PetscSectionGetMaxDof(aSec,&maxAnchors);
275:       maxAnchors = PetscMax(1,maxAnchors);
276:       PetscSectionGetChart(aSec,&aStart,&aEnd);
277:       ISGetIndices(aIS,&anchors);
278:     }
279:   }
280:   if (!*adj) {
281:     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

283:     DMPlexGetChart(dm, &pStart,&pEnd);
284:     DMPlexGetDepth(dm, &depth);
285:     DMPlexGetMaxSizes(dm, &maxC, &maxS);
286:     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287:     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288:     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289:     asiz *= maxAnchors;
290:     asiz  = PetscMin(asiz,pEnd-pStart);
291:     PetscMalloc1(asiz,adj);
292:   }
293:   if (*adjSize < 0) *adjSize = asiz;
294:   maxAdjSize = *adjSize;
295:   if (useTransitiveClosure) {
296:     DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);
297:   } else if (useCone) {
298:     DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);
299:   } else {
300:     DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);
301:   }
302:   if (useAnchors && aSec) {
303:     PetscInt origSize = *adjSize;
304:     PetscInt numAdj = origSize;
305:     PetscInt i = 0, j;
306:     PetscInt *orig = *adj;

308:     while (i < origSize) {
309:       PetscInt p = orig[i];
310:       PetscInt aDof = 0;

312:       if (p >= aStart && p < aEnd) {
313:         PetscSectionGetDof(aSec,p,&aDof);
314:       }
315:       if (aDof) {
316:         PetscInt aOff;
317:         PetscInt s, q;

319:         for (j = i + 1; j < numAdj; j++) {
320:           orig[j - 1] = orig[j];
321:         }
322:         origSize--;
323:         numAdj--;
324:         PetscSectionGetOffset(aSec,p,&aOff);
325:         for (s = 0; s < aDof; ++s) {
326:           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327:             if (anchors[aOff+s] == orig[q]) break;
328:           }
329:           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330:         }
331:       }
332:       else {
333:         i++;
334:       }
335:     }
336:     *adjSize = numAdj;
337:     ISRestoreIndices(aIS,&anchors);
338:   }
339:   return(0);
340: }

344: /*@
345:   DMPlexGetAdjacency - Return all points adjacent to the given point

347:   Input Parameters:
348: + dm - The DM object
349: . p  - The point
350: . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351: - adj - Either NULL so that the array is allocated, or an existing array with size adjSize

353:   Output Parameters:
354: + adjSize - The number of adjacent points
355: - adj - The adjacent points

357:   Level: advanced

359:   Notes: The user must PetscFree the adj array if it was not passed in.

361: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362: @*/
363: PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364: {
365:   DM_Plex       *mesh = (DM_Plex *) dm->data;

372:   DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);
373:   return(0);
374: }
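
/*
  A minimal usage sketch (hypothetical helper, assuming "dm" is an existing DMPlex and "p" a valid
  point): let DMPlexGetAdjacency() allocate the adjacency array by passing NULL, print it, and
  free it afterwards as required by the Notes above.
*/
static PetscErrorCode ExamplePrintAdjacency(DM dm, PetscInt p)
{
  PetscInt      *adj = NULL, adjSize = PETSC_DETERMINE, a;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
  for (a = 0; a < adjSize; ++a) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "point %D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);
  }
  ierr = PetscFree(adj);CHKERRQ(ierr); /* the array was allocated by DMPlexGetAdjacency() */
  PetscFunctionReturn(0);
}
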
377: /*@
378:   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

380:   Collective on DM

382:   Input Parameters:
383: + dm      - The DM
384: - sfPoint - The PetscSF which encodes point connectivity

386:   Output Parameters:
387: + processRanks - A list of process neighbors, or NULL
388: - sfProcess    - An SF encoding the two-sided process connectivity, or NULL

390:   Level: developer

392: .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393: @*/
394: PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395: {
396:   const PetscSFNode *remotePoints;
397:   PetscInt          *localPointsNew;
398:   PetscSFNode       *remotePointsNew;
399:   const PetscInt    *nranks;
400:   PetscInt          *ranksNew;
401:   PetscBT            neighbors;
402:   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403:   PetscMPIInt        numProcs, proc, rank;
404:   PetscErrorCode     ierr;

411:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
412:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
413:   PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);
414:   PetscBTCreate(numProcs, &neighbors);
415:   PetscBTMemzero(numProcs, neighbors);
416:   /* Compute root-to-leaf process connectivity */
417:   PetscSectionGetChart(rootRankSection, &pStart, &pEnd);
418:   ISGetIndices(rootRanks, &nranks);
419:   for (p = pStart; p < pEnd; ++p) {
420:     PetscInt ndof, noff, n;

422:     PetscSectionGetDof(rootRankSection, p, &ndof);
423:     PetscSectionGetOffset(rootRankSection, p, &noff);
424:     for (n = 0; n < ndof; ++n) {PetscBTSet(neighbors, nranks[noff+n]);}
425:   }
426:   ISRestoreIndices(rootRanks, &nranks);
427:   /* Compute leaf-to-neighbor process connectivity */
428:   PetscSectionGetChart(leafRankSection, &pStart, &pEnd);
429:   ISGetIndices(leafRanks, &nranks);
430:   for (p = pStart; p < pEnd; ++p) {
431:     PetscInt ndof, noff, n;

433:     PetscSectionGetDof(leafRankSection, p, &ndof);
434:     PetscSectionGetOffset(leafRankSection, p, &noff);
435:     for (n = 0; n < ndof; ++n) {PetscBTSet(neighbors, nranks[noff+n]);}
436:   }
437:   ISRestoreIndices(leafRanks, &nranks);
438:   /* Compute leaf-to-root process connectivity */
439:   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440:   /* Calculate edges */
441:   PetscBTClear(neighbors, rank);
442:   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443:   PetscMalloc1(numNeighbors, &ranksNew);
444:   PetscMalloc1(numNeighbors, &localPointsNew);
445:   PetscMalloc1(numNeighbors, &remotePointsNew);
446:   for(proc = 0, n = 0; proc < numProcs; ++proc) {
447:     if (PetscBTLookup(neighbors, proc)) {
448:       ranksNew[n]              = proc;
449:       localPointsNew[n]        = proc;
450:       remotePointsNew[n].index = rank;
451:       remotePointsNew[n].rank  = proc;
452:       ++n;
453:     }
454:   }
455:   PetscBTDestroy(&neighbors);
456:   if (processRanks) {ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);}
457:   else              {PetscFree(ranksNew);}
458:   if (sfProcess) {
459:     PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);
460:     PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");
461:     PetscSFSetFromOptions(*sfProcess);
462:     PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);
463:   }
464:   return(0);
465: }
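
/*
  A minimal usage sketch (hypothetical helper): build the two-sided process SF for an existing
  distributed DMPlex "dm"; the ownership sections and index sets are assumed to come from
  DMPlexDistributeOwnership() below.
*/
static PetscErrorCode ExampleProcessSF(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *sfProcess)
{
  PetscSF        sfPoint;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, sfProcess);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}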

469: /*@
470:   DMPlexDistributeOwnership - Compute owner information for shared points. This basically makes the one-sided information in the point SF two-sided.

472:   Collective on DM

474:   Input Parameter:
475: . dm - The DM

477:   Output Parameters:
478: + rootSection - The number of leaves for a given root point
479: . rootrank    - The rank of each edge into the root point
480: . leafSection - The number of processes sharing a given leaf point
481: - leafrank    - The rank of each process sharing a leaf point

483:   Level: developer

485: .seealso: DMPlexCreateOverlap()
486: @*/
487: PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
488: {
489:   MPI_Comm        comm;
490:   PetscSF         sfPoint;
491:   const PetscInt *rootdegree;
492:   PetscInt       *myrank, *remoterank;
493:   PetscInt        pStart, pEnd, p, nedges;
494:   PetscMPIInt     rank;
495:   PetscErrorCode  ierr;

498:   PetscObjectGetComm((PetscObject) dm, &comm);
499:   MPI_Comm_rank(comm, &rank);
500:   DMPlexGetChart(dm, &pStart, &pEnd);
501:   DMGetPointSF(dm, &sfPoint);
502:   /* Compute number of leaves for each root */
503:   PetscObjectSetName((PetscObject) rootSection, "Root Section");
504:   PetscSectionSetChart(rootSection, pStart, pEnd);
505:   PetscSFComputeDegreeBegin(sfPoint, &rootdegree);
506:   PetscSFComputeDegreeEnd(sfPoint, &rootdegree);
507:   for (p = pStart; p < pEnd; ++p) {PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);}
508:   PetscSectionSetUp(rootSection);
509:   /* Gather rank of each leaf to root */
510:   PetscSectionGetStorageSize(rootSection, &nedges);
511:   PetscMalloc1(pEnd-pStart, &myrank);
512:   PetscMalloc1(nedges,  &remoterank);
513:   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
514:   PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);
515:   PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);
516:   PetscFree(myrank);
517:   ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);
518:   /* Distribute remote ranks to leaves */
519:   PetscObjectSetName((PetscObject) leafSection, "Leaf Section");
520:   DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);
521:   return(0);
522: }
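
/*
  A minimal usage sketch (hypothetical helper, assuming "dm" is an existing distributed DMPlex):
  the caller creates the two sections, calls DMPlexDistributeOwnership(), and owns all four
  outputs afterwards.
*/
static PetscErrorCode ExampleOwnership(DM dm)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  /* ... use the ownership data, e.g. pass it to DMPlexCreateOverlap() ... */
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}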

526: /*@C
527:   DMPlexCreateOverlap - Compute the partition overlap, encoded as an SF of the points this process must receive from neighboring processes.

529:   Collective on DM

531:   Input Parameters:
532: + dm          - The DM
533: . rootSection - The number of leaves for a given root point
534: . rootrank    - The rank of each edge into the root point
535: . leafSection - The number of processes sharing a given leaf point
536: - leafrank    - The rank of each process sharing a leaf point

538:   Output Parameter:
539: . overlapSF - An SF whose leaves are the overlap points to be received and whose roots are the corresponding points on their owning processes

544:   Level: developer

546: .seealso: DMPlexDistributeOwnership()
547: @*/
548: PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
549: {
550:   MPI_Comm           comm;
551:   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
552:   PetscSF            sfPoint, sfProc;
553:   IS                 valueIS;
554:   DMLabel            ovLeafLabel;
555:   const PetscSFNode *remote;
556:   const PetscInt    *local;
557:   const PetscInt    *nrank, *rrank, *neighbors;
558:   PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
559:   PetscSection       ovRootSection, ovLeafSection;
560:   PetscInt          *adj = NULL;
561:   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
562:   PetscInt           idx, numRemote;
563:   PetscMPIInt        rank, numProcs;
564:   PetscBool          useCone, useClosure, flg;
565:   PetscErrorCode     ierr;

568:   PetscObjectGetComm((PetscObject) dm, &comm);
569:   MPI_Comm_size(comm, &numProcs);
570:   MPI_Comm_rank(comm, &rank);
571:   DMGetPointSF(dm, &sfPoint);
572:   DMPlexGetChart(dm, &pStart, &pEnd);
573:   PetscSectionGetChart(leafSection, &sStart, &sEnd);
574:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
575:   DMLabelCreate("Overlap adjacency", &ovAdjByRank);
576:   /* Handle leaves: shared with the root point */
577:   for (l = 0; l < nleaves; ++l) {
578:     PetscInt adjSize = PETSC_DETERMINE, a;

580:     DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);
581:     for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);}
582:   }
583:   ISGetIndices(rootrank, &rrank);
584:   ISGetIndices(leafrank, &nrank);
585:   /* Handle roots */
586:   for (p = pStart; p < pEnd; ++p) {
587:     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

589:     if ((p >= sStart) && (p < sEnd)) {
590:       /* Some leaves share a root with other leaves on different processes */
591:       PetscSectionGetDof(leafSection, p, &neighbors);
592:       if (neighbors) {
593:         PetscSectionGetOffset(leafSection, p, &noff);
594:         DMPlexGetAdjacency(dm, p, &adjSize, &adj);
595:         for (n = 0; n < neighbors; ++n) {
596:           const PetscInt remoteRank = nrank[noff+n];

598:           if (remoteRank == rank) continue;
599:           for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);}
600:         }
601:       }
602:     }
603:     /* Roots are shared with leaves */
604:     PetscSectionGetDof(rootSection, p, &neighbors);
605:     if (!neighbors) continue;
606:     PetscSectionGetOffset(rootSection, p, &noff);
607:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
608:     for (n = 0; n < neighbors; ++n) {
609:       const PetscInt remoteRank = rrank[noff+n];

611:       if (remoteRank == rank) continue;
612:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);}
613:     }
614:   }
615:   PetscFree(adj);
616:   ISRestoreIndices(rootrank, &rrank);
617:   ISRestoreIndices(leafrank, &nrank);
618:   /* We require the closure in the overlap */
619:   DMPlexGetAdjacencyUseCone(dm, &useCone);
620:   DMPlexGetAdjacencyUseClosure(dm, &useClosure);
621:   if (useCone || !useClosure) {
622:     IS              rankIS,   pointIS;
623:     const PetscInt *ranks,   *points;
624:     PetscInt        numRanks, numPoints, r, p;

626:     DMLabelGetValueIS(ovAdjByRank, &rankIS);
627:     ISGetLocalSize(rankIS, &numRanks);
628:     ISGetIndices(rankIS, &ranks);
629:     for (r = 0; r < numRanks; ++r) {
630:       const PetscInt rank = ranks[r];

632:       DMLabelGetStratumIS(ovAdjByRank, rank, &pointIS);
633:       ISGetLocalSize(pointIS, &numPoints);
634:       ISGetIndices(pointIS, &points);
635:       for (p = 0; p < numPoints; ++p) {
636:         PetscInt *closure = NULL, closureSize, c;

638:         DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
639:         for (c = 0; c < closureSize*2; c += 2) {DMLabelSetValue(ovAdjByRank, closure[c], rank);}
640:         DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
641:       }
642:       ISRestoreIndices(pointIS, &points);
643:       ISDestroy(&pointIS);
644:     }
645:     ISRestoreIndices(rankIS, &ranks);
646:     ISDestroy(&rankIS);
647:   }
648:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);
649:   if (flg) {
650:     PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);
651:     DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);
652:   }
653:   /* Convert to (point, rank) and use actual owners */
654:   PetscSectionCreate(comm, &ovRootSection);
655:   PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");
656:   PetscSectionSetChart(ovRootSection, 0, numProcs);
657:   DMLabelGetValueIS(ovAdjByRank, &valueIS);
658:   ISGetLocalSize(valueIS, &numNeighbors);
659:   ISGetIndices(valueIS, &neighbors);
660:   for (n = 0; n < numNeighbors; ++n) {
661:     PetscInt numPoints;

663:     DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);
664:     PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);
665:   }
666:   PetscSectionSetUp(ovRootSection);
667:   PetscSectionGetStorageSize(ovRootSection, &ovSize);
668:   PetscMalloc1(ovSize, &ovRootPoints);
669:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
670:   for (n = 0; n < numNeighbors; ++n) {
671:     IS              pointIS;
672:     const PetscInt *points;
673:     PetscInt        off, numPoints, p;

675:     PetscSectionGetOffset(ovRootSection, neighbors[n], &off);
676:     DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);
677:     ISGetLocalSize(pointIS, &numPoints);
678:     ISGetIndices(pointIS, &points);
679:     for (p = 0; p < numPoints; ++p) {
680:       PetscFindInt(points[p], nleaves, local, &l);
681:       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
682:       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
683:     }
684:     ISRestoreIndices(pointIS, &points);
685:     ISDestroy(&pointIS);
686:   }
687:   ISRestoreIndices(valueIS, &neighbors);
688:   ISDestroy(&valueIS);
689:   DMLabelDestroy(&ovAdjByRank);
690:   /* Make process SF */
691:   DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);
692:   if (flg) {
693:     PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);
694:   }
695:   /* Communicate overlap */
696:   PetscSectionCreate(comm, &ovLeafSection);
697:   PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");
698:   DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);
699:   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
700:   DMLabelCreate("ovLeafs", &ovLeafLabel);
701:   PetscSectionGetStorageSize(ovLeafSection, &ovSize);
702:   for (p = 0; p < ovSize; p++) {
703:     /* Don't import points from yourself */
704:     if (ovLeafPoints[p].rank == rank) continue;
705:     DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);
706:   }
707:   /* Don't import points already in the pointSF */
708:   for (l = 0; l < nleaves; ++l) {
709:     DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);
710:   }
711:   for (numRemote = 0, n = 0; n < numProcs; ++n) {
712:     PetscInt numPoints;
713:     DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);
714:     numRemote += numPoints;
715:   }
716:   PetscMalloc1(numRemote, &remotePoints);
717:   for (idx = 0, n = 0; n < numProcs; ++n) {
718:     IS remoteRootIS;
719:     PetscInt numPoints;
720:     const PetscInt *remoteRoots;
721:     DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);
722:     if (numPoints <= 0) continue;
723:     DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);
724:     ISGetIndices(remoteRootIS, &remoteRoots);
725:     for (p = 0; p < numPoints; p++) {
726:       remotePoints[idx].index = remoteRoots[p];
727:       remotePoints[idx].rank = n;
728:       idx++;
729:     }
730:     ISRestoreIndices(remoteRootIS, &remoteRoots);
731:     ISDestroy(&remoteRootIS);
732:   }
733:   DMLabelDestroy(&ovLeafLabel);
734:   PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);
735:   PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");
736:   PetscSFSetFromOptions(*overlapSF);
737:   PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
738:   if (flg) {
739:     PetscPrintf(comm, "Overlap SF\n");
740:     PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);
741:   }
742:   /* Clean up */
743:   PetscSFDestroy(&sfProc);
744:   PetscFree(ovRootPoints);
745:   PetscSectionDestroy(&ovRootSection);
746:   PetscFree(ovLeafPoints);
747:   PetscSectionDestroy(&ovLeafSection);
748:   return(0);
749: }
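
/*
  A minimal usage sketch (hypothetical helper): compute and view the overlap SF of an existing
  distributed DMPlex "dm" from ownership data assumed to come from DMPlexDistributeOwnership().
*/
static PetscErrorCode ExampleViewOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank)
{
  PetscSF        overlapSF;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
  ierr = PetscSFView(overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}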

753: PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
754: {
755:   MPI_Comm           comm;
756:   PetscMPIInt        rank, numProcs;
757:   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
758:   PetscInt          *pointDepths, *remoteDepths, *ilocal;
759:   PetscInt          *depthRecv, *depthShift, *depthIdx;
760:   PetscSFNode       *iremote;
761:   PetscSF            pointSF;
762:   const PetscInt    *sharedLocal;
763:   const PetscSFNode *overlapRemote, *sharedRemote;
764:   PetscErrorCode     ierr;

768:   PetscObjectGetComm((PetscObject)dm, &comm);
769:   MPI_Comm_rank(comm, &rank);
770:   MPI_Comm_size(comm, &numProcs);
771:   DMGetDimension(dm, &dim);

773:   /* Before building the migration SF we need to know the new stratum offsets */
774:   PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);
775:   PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);
776:   for (d=0; d<dim+1; d++) {
777:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
778:     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
779:   }
780:   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
781:   PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);
782:   PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);

784:   /* Count received points in each stratum and compute the internal strata shift */
785:   PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);
786:   for (d=0; d<dim+1; d++) depthRecv[d]=0;
787:   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
788:   depthShift[dim] = 0;
789:   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
790:   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
791:   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
792:   for (d=0; d<dim+1; d++) {
793:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
794:     depthIdx[d] = pStart + depthShift[d];
795:   }

797:   /* Build an SF that describes the full overlap migration */
798:   DMPlexGetChart(dm, &pStart, &pEnd);
799:   newLeaves = pEnd - pStart + nleaves;
800:   PetscMalloc1(newLeaves, &ilocal);
801:   PetscMalloc1(newLeaves, &iremote);
802:   /* First map local points to themselves */
803:   for (d=0; d<dim+1; d++) {
804:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
805:     for (p=pStart; p<pEnd; p++) {
806:       point = p + depthShift[d];
807:       ilocal[point] = point;
808:       iremote[point].index = p;
809:       iremote[point].rank = rank;
810:       depthIdx[d]++;
811:     }
812:   }

814:   /* Add in the remote roots for currently shared points */
815:   DMGetPointSF(dm, &pointSF);
816:   PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);
817:   for (d=0; d<dim+1; d++) {
818:     DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
819:     for (p=0; p<numSharedPoints; p++) {
820:       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
821:         point = sharedLocal[p] + depthShift[d];
822:         iremote[point].index = sharedRemote[p].index;
823:         iremote[point].rank = sharedRemote[p].rank;
824:       }
825:     }
826:   }

828:   /* Now add the incoming overlap points */
829:   for (p=0; p<nleaves; p++) {
830:     point = depthIdx[remoteDepths[p]];
831:     ilocal[point] = point;
832:     iremote[point].index = overlapRemote[p].index;
833:     iremote[point].rank = overlapRemote[p].rank;
834:     depthIdx[remoteDepths[p]]++;
835:   }
836:   PetscFree2(pointDepths,remoteDepths);

838:   PetscSFCreate(comm, migrationSF);
839:   PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");
840:   PetscSFSetFromOptions(*migrationSF);
841:   DMPlexGetChart(dm, &pStart, &pEnd);
842:   PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);

844:   PetscFree3(depthRecv, depthShift, depthIdx);
845:   return(0);
846: }

850: /*@
851:   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

853:   Collective on DM

855:   Input Parameters:
856: + dm - The DMPlex object
857: . pointSF - The PetscSF describing the communication pattern
858: . originalSection - The PetscSection for existing data layout
859: - originalVec - The existing data

861:   Output Parameters:
862: + newSection - The PetscSection describing the new data layout
863: - newVec - The new data

865:   Level: developer

867: .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
868: @*/
869: PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
870: {
871:   PetscSF        fieldSF;
872:   PetscInt      *remoteOffsets, fieldSize;
873:   PetscScalar   *originalValues, *newValues;

877:   PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);
878:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

880:   PetscSectionGetStorageSize(newSection, &fieldSize);
881:   VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);
882:   VecSetType(newVec,dm->vectype);

884:   VecGetArray(originalVec, &originalValues);
885:   VecGetArray(newVec, &newValues);
886:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
887:   PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);
888:   PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);
889:   PetscSFDestroy(&fieldSF);
890:   VecRestoreArray(newVec, &newValues);
891:   VecRestoreArray(originalVec, &originalValues);
892:   PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);
893:   return(0);
894: }
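
/*
  A minimal usage sketch (hypothetical helper): distribute a field Vec with a migration SF such
  as the one returned by DMPlexDistribute(); the caller creates the new section and vector, which
  DMPlexDistributeField() then sizes and fills.
*/
static PetscErrorCode ExampleDistributeVec(DM dm, PetscSF pointSF, PetscSection origSection, Vec origVec, PetscSection *newSection, Vec *newVec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), newSection);CHKERRQ(ierr);
  ierr = VecCreate(PetscObjectComm((PetscObject) dm), newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, pointSF, origSection, origVec, *newSection, *newVec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}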

898: /*@
899:   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

901:   Collective on DM

903:   Input Parameters:
904: + dm - The DMPlex object
905: . pointSF - The PetscSF describing the communication pattern
906: . originalSection - The PetscSection for existing data layout
907: - originalIS - The existing data

909:   Output Parameters:
910: + newSection - The PetscSection describing the new data layout
911: - newIS - The new data

913:   Level: developer

915: .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
916: @*/
917: PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
918: {
919:   PetscSF         fieldSF;
920:   PetscInt       *newValues, *remoteOffsets, fieldSize;
921:   const PetscInt *originalValues;
922:   PetscErrorCode  ierr;

925:   PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);
926:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

928:   PetscSectionGetStorageSize(newSection, &fieldSize);
929:   PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);

931:   ISGetIndices(originalIS, &originalValues);
932:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
933:   PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);
934:   PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);
935:   PetscSFDestroy(&fieldSF);
936:   ISRestoreIndices(originalIS, &originalValues);
937:   ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);
938:   PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);
939:   return(0);
940: }

944: /*@
945:   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

947:   Collective on DM

949:   Input Parameters:
950: + dm - The DMPlex object
951: . pointSF - The PetscSF describing the communication pattern
952: . originalSection - The PetscSection for existing data layout
953: . datatype - The type of data
954: - originalData - The existing data

956:   Output Parameters:
957: + newSection - The PetscSection describing the new data layout
958: - newData - The new data

960:   Level: developer

962: .seealso: DMPlexDistribute(), DMPlexDistributeField()
963: @*/
964: PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
965: {
966:   PetscSF        fieldSF;
967:   PetscInt      *remoteOffsets, fieldSize;
968:   PetscMPIInt    dataSize;

972:   PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);
973:   PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);

975:   PetscSectionGetStorageSize(newSection, &fieldSize);
976:   MPI_Type_size(datatype, &dataSize);
977:   PetscMalloc(fieldSize * dataSize, newData);

979:   PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
980:   PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);
981:   PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);
982:   PetscSFDestroy(&fieldSF);
983:   PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);
984:   return(0);
985: }
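
/*
  A minimal usage sketch (hypothetical helper): distribute an array of per-point PetscInt data
  with a migration SF; the new buffer is allocated inside DMPlexDistributeData() and must be
  freed by the caller.
*/
static PetscErrorCode ExampleDistributeIntData(DM dm, PetscSF pointSF, PetscSection origSection, PetscInt *origData)
{
  PetscSection   newSection;
  PetscInt      *newData;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
  /* ... use newSection and newData ... */
  ierr = PetscFree(newData);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}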

989: PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
990: {
991:   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
992:   MPI_Comm               comm;
993:   PetscSF                coneSF;
994:   PetscSection           originalConeSection, newConeSection;
995:   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
996:   PetscBool              flg;
997:   PetscErrorCode         ierr;

1002:   PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);

1004:   /* Distribute cone section */
1005:   PetscObjectGetComm((PetscObject)dm, &comm);
1006:   DMPlexGetConeSection(dm, &originalConeSection);
1007:   DMPlexGetConeSection(dmParallel, &newConeSection);
1008:   PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);
1009:   DMSetUp(dmParallel);
1010:   {
1011:     PetscInt pStart, pEnd, p;

1013:     PetscSectionGetChart(newConeSection, &pStart, &pEnd);
1014:     for (p = pStart; p < pEnd; ++p) {
1015:       PetscInt coneSize;
1016:       PetscSectionGetDof(newConeSection, p, &coneSize);
1017:       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1018:     }
1019:   }
1020:   /* Communicate and renumber cones */
1021:   PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);
1022:   DMPlexGetCones(dm, &cones);
1023:   DMPlexGetCones(dmParallel, &newCones);
1024:   PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
1025:   PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
1026:   PetscSectionGetStorageSize(newConeSection, &newConesSize);
1027:   ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);
1028: #if defined(PETSC_USE_DEBUG)
1029:   {
1030:     PetscInt  p;
1031:     PetscBool valid = PETSC_TRUE;
1032:     for (p = 0; p < newConesSize; ++p) {
1033:       if (newCones[p] < 0) {valid = PETSC_FALSE; PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);}
1034:     }
1035:     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1036:   }
1037: #endif
1038:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);
1039:   if (flg) {
1040:     PetscPrintf(comm, "Serial Cone Section:\n");
1041:     PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);
1042:     PetscPrintf(comm, "Parallel Cone Section:\n");
1043:     PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);
1044:     PetscSFView(coneSF, NULL);
1045:   }
1046:   DMPlexGetConeOrientations(dm, &cones);
1047:   DMPlexGetConeOrientations(dmParallel, &newCones);
1048:   PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
1049:   PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
1050:   PetscSFDestroy(&coneSF);
1051:   PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);
1052:   /* Create supports and stratify sieve */
1053:   {
1054:     PetscInt pStart, pEnd;

1056:     PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);
1057:     PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);
1058:   }
1059:   DMPlexSymmetrize(dmParallel);
1060:   DMPlexStratify(dmParallel);
1061:   return(0);
1062: }

1066: PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1067: {
1068:   MPI_Comm         comm;
1069:   PetscSection     originalCoordSection, newCoordSection;
1070:   Vec              originalCoordinates, newCoordinates;
1071:   PetscInt         bs;
1072:   const char      *name;
1073:   const PetscReal *maxCell, *L;
1074:   PetscErrorCode   ierr;


1080:   PetscObjectGetComm((PetscObject)dm, &comm);
1081:   DMGetCoordinateSection(dm, &originalCoordSection);
1082:   DMGetCoordinateSection(dmParallel, &newCoordSection);
1083:   DMGetCoordinatesLocal(dm, &originalCoordinates);
1084:   if (originalCoordinates) {
1085:     VecCreate(comm, &newCoordinates);
1086:     PetscObjectGetName((PetscObject) originalCoordinates, &name);
1087:     PetscObjectSetName((PetscObject) newCoordinates, name);

1089:     DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);
1090:     DMSetCoordinatesLocal(dmParallel, newCoordinates);
1091:     VecGetBlockSize(originalCoordinates, &bs);
1092:     VecSetBlockSize(newCoordinates, bs);
1093:     VecDestroy(&newCoordinates);
1094:   }
1095:   DMGetPeriodicity(dm, &maxCell, &L);
1096:   if (L) {DMSetPeriodicity(dmParallel, maxCell, L);}
1097:   return(0);
1098: }

1102: /* Here we are assuming that process 0 always has everything */
1103: PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1104: {
1105:   MPI_Comm       comm;
1106:   PetscMPIInt    rank;
1107:   PetscInt       numLabels, numLocalLabels, l;
1108:   PetscBool      hasLabels = PETSC_FALSE;

1114:   PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);
1115:   PetscObjectGetComm((PetscObject)dm, &comm);
1116:   MPI_Comm_rank(comm, &rank);

1118:   /* Everyone must have either the same number of labels, or none */
1119:   DMPlexGetNumLabels(dm, &numLocalLabels);
1120:   numLabels = numLocalLabels;
1121:   MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);
1122:   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1123:   for (l = numLabels-1; l >= 0; --l) {
1124:     DMLabel     label = NULL, labelNew = NULL;
1125:     PetscBool   isdepth;

1127:     if (hasLabels) {
1128:       DMPlexGetLabelByNum(dm, l, &label);
1129:       /* Skip "depth" because it is recreated */
1130:       PetscStrcmp(label->name, "depth", &isdepth);
1131:     }
1132:     MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);
1133:     if (isdepth) continue;
1134:     DMLabelDistribute(label, migrationSF, &labelNew);
1135:     DMPlexAddLabel(dmParallel, labelNew);
1136:   }
1137:   PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);
1138:   return(0);
1139: }

1143: PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1144: {
1145:   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1146:   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1147:   MPI_Comm        comm;
1148:   const PetscInt *gpoints;
1149:   PetscInt        dim, depth, n, d;
1150:   PetscErrorCode  ierr;


1156:   PetscObjectGetComm((PetscObject)dm, &comm);
1157:   DMGetDimension(dm, &dim);

1159:   /* Setup hybrid structure */
1160:   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1161:   MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);
1162:   ISLocalToGlobalMappingGetSize(renumbering, &n);
1163:   ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);
1164:   DMPlexGetDepth(dm, &depth);
1165:   for (d = 0; d <= dim; ++d) {
1166:     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

1168:     if (pmax < 0) continue;
1169:     DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);
1170:     DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);
1171:     MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);
1172:     for (p = 0; p < n; ++p) {
1173:       const PetscInt point = gpoints[p];

1175:       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1176:     }
1177:     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1178:     else            pmesh->hybridPointMax[d] = -1;
1179:   }
1180:   ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);
1181:   return(0);
1182: }

1186: PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1187: {
1188:   MPI_Comm        comm;
1189:   DM              refTree;
1190:   PetscSection    origParentSection, newParentSection;
1191:   PetscInt        *origParents, *origChildIDs;
1192:   PetscBool       flg;
1193:   PetscErrorCode  ierr;

1198:   PetscObjectGetComm((PetscObject)dm, &comm);

1200:   /* Set up tree */
1201:   DMPlexGetReferenceTree(dm,&refTree);
1202:   DMPlexSetReferenceTree(dmParallel,refTree);
1203:   DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);
1204:   if (origParentSection) {
1205:     PetscInt        pStart, pEnd;
1206:     PetscInt        *newParents, *newChildIDs;
1207:     PetscInt        *remoteOffsetsParents, newParentSize;
1208:     PetscSF         parentSF;

1210:     DMPlexGetChart(dmParallel, &pStart, &pEnd);
1211:     PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);
1212:     PetscSectionSetChart(newParentSection,pStart,pEnd);
1213:     PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);
1214:     PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);
1215:     PetscSectionGetStorageSize(newParentSection,&newParentSize);
1216:     PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);
1217:     PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);
1218:     PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);
1219:     PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);
1220:     PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);
1221:     ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);
1222:     PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);
1223:     if (flg) {
1224:       PetscPrintf(comm, "Serial Parent Section: \n");
1225:       PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);
1226:       PetscPrintf(comm, "Parallel Parent Section: \n");
1227:       PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);
1228:       PetscSFView(parentSF, NULL);
1229:     }
1230:     DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);
1231:     PetscSectionDestroy(&newParentSection);
1232:     PetscFree2(newParents,newChildIDs);
1233:     PetscSFDestroy(&parentSF);
1234:   }
1235:   return(0);
1236: }

1240: PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1241: {
1242:   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1243:   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1244:   PetscMPIInt            rank, numProcs;
1245:   MPI_Comm               comm;
1246:   PetscErrorCode         ierr;


1252:   /* Create point SF for parallel mesh */
1253:   PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);
1254:   PetscObjectGetComm((PetscObject)dm, &comm);
1255:   MPI_Comm_rank(comm, &rank);
1256:   MPI_Comm_size(comm, &numProcs);
1257:   {
1258:     const PetscInt *leaves;
1259:     PetscSFNode    *remotePoints, *rowners, *lowners;
1260:     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1261:     PetscInt        pStart, pEnd;

1263:     DMPlexGetChart(dmParallel, &pStart, &pEnd);
1264:     PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);
1265:     PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);
1266:     for (p=0; p<numRoots; p++) {
1267:       rowners[p].rank  = -1;
1268:       rowners[p].index = -1;
1269:     }
1270:     if (origPart) {
1271:       /* Make sure points in the original partition are not assigned to other procs */
1272:       const PetscInt *origPoints;

1274:       ISGetIndices(origPart, &origPoints);
1275:       for (p = 0; p < numProcs; ++p) {
1276:         PetscInt dof, off, d;

1278:         PetscSectionGetDof(origPartSection, p, &dof);
1279:         PetscSectionGetOffset(origPartSection, p, &off);
1280:         for (d = off; d < off+dof; ++d) {
1281:           rowners[origPoints[d]].rank = p;
1282:         }
1283:       }
1284:       ISRestoreIndices(origPart, &origPoints);
1285:     }
1286:     PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);
1287:     PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);
1288:     for (p = 0; p < numLeaves; ++p) {
1289:       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1290:         lowners[p].rank  = rank;
1291:         lowners[p].index = leaves ? leaves[p] : p;
1292:       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1293:         lowners[p].rank  = -2;
1294:         lowners[p].index = -2;
1295:       }
1296:     }
1297:     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1298:       rowners[p].rank  = -3;
1299:       rowners[p].index = -3;
1300:     }
1301:     PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
1302:     PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
1303:     PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);
1304:     PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);
1305:     for (p = 0; p < numLeaves; ++p) {
1306:       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1307:       if (lowners[p].rank != rank) ++numGhostPoints;
1308:     }
1309:     PetscMalloc1(numGhostPoints, &ghostPoints);
1310:     PetscMalloc1(numGhostPoints, &remotePoints);
1311:     for (p = 0, gp = 0; p < numLeaves; ++p) {
1312:       if (lowners[p].rank != rank) {
1313:         ghostPoints[gp]        = leaves ? leaves[p] : p;
1314:         remotePoints[gp].rank  = lowners[p].rank;
1315:         remotePoints[gp].index = lowners[p].index;
1316:         ++gp;
1317:       }
1318:     }
1319:     PetscFree2(rowners,lowners);
1320:     PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1321:     PetscSFSetFromOptions((dmParallel)->sf);
1322:   }
1323:   pmesh->useCone    = mesh->useCone;
1324:   pmesh->useClosure = mesh->useClosure;
1325:   PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);
1326:   return(0);
1327: }

1331: /*@C
1332:   DMPlexDistribute - Distributes the mesh and any associated sections.

1334:   Collective on DM

1336:   Input Parameters:
1337: + dm  - The original DMPlex object
1338: - overlap - The overlap of partitions, 0 is the default

1340:   Output Parameters:
1341: + sf - The PetscSF used for point distribution
1342: - parallelMesh - The distributed DMPlex object, or NULL

1344:   Note: If the mesh was not distributed (e.g. the run uses a single process), parallelMesh is set to NULL.

1346:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1347:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1348:   representation on the mesh.

1350:   Level: intermediate

1352: .keywords: mesh, elements
1353: .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1354: @*/
1355: PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1356: {
1357:   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1358:   MPI_Comm               comm;
1359:   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1360:   DM                     dmOverlap;
1361:   IS                     cellPart,        part;
1362:   PetscSection           cellPartSection, partSection;
1363:   PetscSFNode           *remoteRanks, *newRemote;
1364:   const PetscSFNode     *oldRemote;
1365:   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1366:   ISLocalToGlobalMapping renumbering;
1367:   PetscBool              flg;
1368:   PetscMPIInt            rank, numProcs, p;
1369:   PetscErrorCode         ierr;


1376:   PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);
1377:   PetscObjectGetComm((PetscObject)dm,&comm);
1378:   MPI_Comm_rank(comm, &rank);
1379:   MPI_Comm_size(comm, &numProcs);

1381:   *dmParallel = NULL;
1382:   if (numProcs == 1) return(0);

1384:   DMGetDimension(dm, &dim);
1385:   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1386:   PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);
1387:   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1388:   PetscSectionCreate(comm, &cellPartSection);
1389:   PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);
1390:   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1391:   if (!rank) numRemoteRanks = numProcs;
1392:   else       numRemoteRanks = 0;
1393:   PetscMalloc1(numRemoteRanks, &remoteRanks);
1394:   for (p = 0; p < numRemoteRanks; ++p) {
1395:     remoteRanks[p].rank  = p;
1396:     remoteRanks[p].index = 0;
1397:   }
1398:   PetscSFCreate(comm, &partSF);
1399:   PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);
1400:   PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);
1401:   if (flg) {
1402:     PetscPrintf(comm, "Original Cell Partition:\n");
1403:     PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);
1404:     ISView(cellPart, NULL);
1405:     PetscSFView(partSF, NULL);
1406:   }
1407:   /* Close the partition over the mesh */
1408:   DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);
1409:   /* Create new mesh */
1410:   DMPlexCreate(comm, dmParallel);
1411:   DMSetDimension(*dmParallel, dim);
1412:   PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");
1413:   pmesh = (DM_Plex*) (*dmParallel)->data;
1414:   pmesh->useAnchors = mesh->useAnchors;

1416:   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1417:   PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);
1418:   if (flg) {
1419:     PetscPrintf(comm, "Point Partition:\n");
1420:     PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);
1421:     ISView(part, NULL);
1422:     PetscSFView(pointSF, NULL);
1423:     PetscPrintf(comm, "Point Renumbering after partition:\n");
1424:     ISLocalToGlobalMappingView(renumbering, NULL);
1425:   }
1426:   PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);

1428:   /* Migrate data to a non-overlapping parallel DM */
1429:   DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);
1430:   DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);
1431:   DMPlexDistributeLabels(dm, pointSF, *dmParallel);
1432:   DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);
1433:   DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);

1435:   /* Build the point SF without overlap */
1436:   DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);
1437:   if (flg) {
1438:     PetscPrintf(comm, "Point SF:\n");
1439:     PetscSFView((*dmParallel)->sf, NULL);
1440:   }

1442:   if (overlap > 0) {
1443:     PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);
1444:     /* Add the partition overlap to the distributed DM */
1445:     DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);
1446:     DMDestroy(dmParallel);
1447:     *dmParallel = dmOverlap;
1448:     if (flg) {
1449:       PetscPrintf(comm, "Overlap Migration SF:\n");
1450:       PetscSFView(overlapSF, NULL);
1451:     }

1453:     /* Re-map the pointSF to establish the full migration pattern */
1454:     PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);
1455:     PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);
1456:     PetscMalloc1(nleaves, &newRemote);
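        /* Broadcasting the old remote entries over overlapSF composes the two migrations:
           each leaf of the overlap SF ends up pointing at its root in the original mesh */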
1457:     PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);
1458:     PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);
1459:     PetscSFCreate(comm, &overlapPointSF);
1460:     PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);
1461:     PetscSFDestroy(&overlapSF);
1462:     PetscSFDestroy(&pointSF);
1463:     pointSF = overlapPointSF;
1464:     PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);
1465:   }
1466:   /* Cleanup Partition */
1467:   ISLocalToGlobalMappingDestroy(&renumbering);
1468:   PetscSFDestroy(&partSF);
1469:   PetscSectionDestroy(&partSection);
1470:   ISDestroy(&part);
1471:   PetscSectionDestroy(&cellPartSection);
1472:   ISDestroy(&cellPart);
1473:   /* Copy BC */
1474:   DMPlexCopyBoundary(dm, *dmParallel);
1475:   /* Cleanup */
1476:   if (sf) {*sf = pointSF;}
1477:   else    {PetscSFDestroy(&pointSF);}
1478:   DMSetFromOptions(*dmParallel);
1479:   PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);
1480:   return(0);
1481: }
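
/* A minimal usage sketch (not part of the PETSc sources): the typical calling sequence
   that drives the distribution path implemented above.  It assumes this version's
   DMPlexDistribute(dm, overlap, &sf, &dmDist) argument order and uses a hypothetical
   helper name; the adjacency calls select the FEM setting (q \in closure(star(p))). */
static PetscErrorCode ExampleDistributeUsage(DM dm, PetscInt overlap, DM *dmDist)
{
  PetscSF sf;

  DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);
  DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);
  DMPlexDistribute(dm, overlap, &sf, dmDist);
  if (*dmDist) {
    /* sf maps original points to their distributed locations; destroy it if it is not needed */
    PetscSFDestroy(&sf);
  }
  return(0);
}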

1485: /*@C
1486:   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

1488:   Collective on DM

1490:   Input Parameters:
1491: + dm  - The non-overlapping distributed DMPlex object
1492: . overlap - The overlap of partitions, 0 is the default
 - renumbering - The point renumbering (ISLocalToGlobalMapping) produced when the mesh was distributed

1494:   Output Parameters:
1495: + sf - The PetscSF used for point migration
1496: - dmOverlap - The overlapping distributed DMPlex object, or NULL

1498:   Note: If the mesh was not distributed, the return value is NULL.

1500:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1501:   DMPlexSetAdjacencyUseClosure(). The combination should be chosen to match the function
1502:   representation on the mesh.

1504:   Level: intermediate

1506: .keywords: mesh, elements
1507: .seealso: DMPlexCreate(), DMPlexDistribute(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1508: @*/
1509: PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1510: {
1511:   MPI_Comm               comm;
1512:   PetscMPIInt            rank;
1513:   PetscSection           rootSection, leafSection;
1514:   IS                     rootrank, leafrank;
1515:   PetscSection           coneSection;
1516:   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1517:   PetscSFNode           *ghostRemote;
1518:   const PetscSFNode     *overlapRemote;
1519:   ISLocalToGlobalMapping overlapRenumbering;
1520:   const PetscInt        *renumberingArray, *overlapLocal;
1521:   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1522:   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1523:   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1524:   PetscErrorCode         ierr;


1531:   PetscObjectGetComm((PetscObject)dm,&comm);
1532:   MPI_Comm_rank(comm, &rank);
1533:   DMGetDimension(dm, &dim);

1535:   /* Compute point overlap with neighbouring processes on the distributed DM */
1536:   PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);
1537:   PetscSectionCreate(comm, &rootSection);
1538:   PetscSectionCreate(comm, &leafSection);
1539:   DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);
1540:   DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);
1541:   PetscSectionDestroy(&rootSection);
1542:   PetscSectionDestroy(&leafSection);
1543:   ISDestroy(&rootrank);
1544:   ISDestroy(&leafrank);
1545:   PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);

1547:   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1548:   DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);

1550:   /* Convert cones to global numbering before migrating them */
1551:   DMPlexGetConeSection(dm, &coneSection);
1552:   PetscSectionGetStorageSize(coneSection, &conesSize);
1553:   DMPlexGetCones(dm, &cones);
1554:   ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);

1556:   /* Derive the new local-to-global mapping from the old one */
1557:   PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);
1558:   PetscMalloc1(overlapLeaves, &overlapRenumberingArray);
1559:   ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);
1560:   PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);
1561:   PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);
1562:   ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);

1564:   /* Build the overlapping DM */
1565:   DMPlexCreate(comm, dmOverlap);
1566:   DMSetDimension(*dmOverlap, dim);
1567:   PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");
1568:   DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);
1569:   DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);
1570:   DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);
1571:   DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);

1573:   /* Build the new point SF by propagating local point IDs through the migration SF to generate the remote root indices */
1574:   DMGetPointSF(dm, &pointSF);
1575:   PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);
1576:   PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);
1577:   numGhostPoints = numSharedPoints + numOverlapPoints;
1578:   PetscMalloc1(numGhostPoints, &ghostLocal);
1579:   PetscMalloc1(numGhostPoints, &ghostRemote);
1580:   DMPlexGetChart(dm, &pStart, &pEnd);
1581:   PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);
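        /* Record, for each local point of dm, its new local number in the overlapping DM (taken from
           the migration leaf that stays on this rank), then broadcast so every receiving rank learns
           the owner's local number for its ghost points */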
1582:   for (p=0; p<overlapLeaves; p++) {
1583:     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1584:   }
1585:   PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);
1586:   PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);
1587:   for (idx=0, p=0; p<overlapLeaves; p++) {
1588:     if (overlapRemote[p].rank != rank) {
1589:       ghostLocal[idx] = overlapLocal[p];
1590:       ghostRemote[idx].index = recvPointIDs[p];
1591:       ghostRemote[idx].rank = overlapRemote[p].rank;
1592:       idx++;
1593:     }
1594:   }
1595:   DMPlexGetChart(*dmOverlap, &pStart, &pEnd);
1596:   PetscSFCreate(comm, &newPointSF);
1597:   PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);
1598:   DMSetPointSF(*dmOverlap, newPointSF);
1599:   PetscSFDestroy(&newPointSF);
1600:   /* Cleanup overlap partition */
1601:   ISLocalToGlobalMappingDestroy(&overlapRenumbering);
1602:   PetscSFDestroy(&overlapSF);
1603:   PetscFree2(pointIDs, recvPointIDs);
1604:   if (sf) *sf = migrationSF;
1605:   else    {PetscSFDestroy(&migrationSF);}
1606:   return(0);
1607: }
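
/* A minimal sketch (not part of the PETSc sources) of how DMPlexDistributeOverlap() is meant
   to be driven, mirroring the overlap branch of DMPlexDistribute() above: the caller hands in
   the freshly distributed DM together with its point renumbering and receives the overlapping
   DM plus the dense migration SF.  The helper name is hypothetical. */
static PetscErrorCode ExampleOverlapUsage(DM dmParallel, PetscInt overlap, ISLocalToGlobalMapping renumbering, DM *dmWithOverlap)
{
  PetscSF migrationSF;

  DMPlexDistributeOverlap(dmParallel, overlap, renumbering, &migrationSF, dmWithOverlap);
  /* The migration SF describes how points move from dmParallel to *dmWithOverlap;
     destroy it here if it is not needed for migrating field data */
  PetscSFDestroy(&migrationSF);
  return(0);
}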