Actual source code: plexpartition.c

petsc-3.8.0 2017-09-26
  1:  #include <petsc/private/dmpleximpl.h>

  3: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  5: PetscFunctionList PetscPartitionerList              = NULL;
  6: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  8: PetscBool ChacoPartitionercite = PETSC_FALSE;
  9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 10:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 11:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 12:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 13:                                "  isbn      = {0-89791-816-9},\n"
 14:                                "  pages     = {28},\n"
 15:                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
 16:                                "  publisher = {ACM Press},\n"
 17:                                "  address   = {New York},\n"
 18:                                "  year      = {1995}\n}\n";

 20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 22:                                "  author  = {George Karypis and Vipin Kumar},\n"
 23:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 24:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 25:                                "  volume  = {48},\n"
 26:                                "  pages   = {71--85},\n"
 27:                                "  year    = {1998}\n}\n";

 29: /*@C
 30:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

 32:   Input Parameters:
 33: + dm      - The mesh DM
 34: - height  - Height of the strata from which to construct the graph

 36:   Output Parameters:
 37: + numVertices     - Number of vertices in the graph
 38: . offsets         - Point offsets in the graph
 39: . adjacency       - Point connectivity in the graph
 40: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". A negative value indicates that the cell is a duplicate from another process.

 42:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
 43:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
 44:   representation on the mesh.
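
  Sample usage (an illustrative sketch only, with error checking omitted; the variable names are placeholders). The offsets and adjacency arrays are allocated by this routine, so the caller frees them with PetscFree() and destroys the returned IS, as PetscPartitionerPartition() does below:
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve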

 46:   Level: developer

 48: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
 49: @*/
 50: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 51: {
 52:   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
 53:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 54:   IS             cellNumbering;
 55:   const PetscInt *cellNum;
 56:   PetscBool      useCone, useClosure;
 57:   PetscSection   section;
 58:   PetscSegBuffer adjBuffer;
 59:   PetscSF        sfPoint;
 60:   PetscMPIInt    rank;

 64:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 65:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 66:   DMGetPointSF(dm, &sfPoint);
 67:   PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
 68:   /* Build adjacency graph via a section/segbuffer */
 69:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 70:   PetscSectionSetChart(section, pStart, pEnd);
 71:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 72:   /* Always use FVM adjacency to create partitioner graph */
 73:   DMPlexGetAdjacencyUseCone(dm, &useCone);
 74:   DMPlexGetAdjacencyUseClosure(dm, &useClosure);
 75:   DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
 76:   DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
 77:   DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
 78:   if (globalNumbering) {
 79:     PetscObjectReference((PetscObject)cellNumbering);
 80:     *globalNumbering = cellNumbering;
 81:   }
 82:   ISGetIndices(cellNumbering, &cellNum);
 83:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
 84:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
 85:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
 86:     adjSize = PETSC_DETERMINE;
 87:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
 88:     for (a = 0; a < adjSize; ++a) {
 89:       const PetscInt point = adj[a];
 90:       if (point != p && pStart <= point && point < pEnd) {
 91:         PetscInt *PETSC_RESTRICT pBuf;
 92:         PetscSectionAddDof(section, p, 1);
 93:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
 94:         *pBuf = point;
 95:       }
 96:     }
 97:     (*numVertices)++;
 98:   }
 99:   DMPlexSetAdjacencyUseCone(dm, useCone);
100:   DMPlexSetAdjacencyUseClosure(dm, useClosure);
101:   /* Derive CSR graph from section/segbuffer */
102:   PetscSectionSetUp(section);
103:   PetscSectionGetStorageSize(section, &size);
104:   PetscMalloc1(*numVertices+1, &vOffsets);
105:   for (idx = 0, p = pStart; p < pEnd; p++) {
106:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
108:   }
109:   vOffsets[*numVertices] = size;
110:   if (offsets) *offsets = vOffsets;
111:   PetscSegBufferExtractAlloc(adjBuffer, &graph);
112:   {
113:     ISLocalToGlobalMapping ltogCells;
114:     PetscInt n, size, *cells_arr;
115:     /* In parallel, apply a global cell numbering to the graph */
116:     ISRestoreIndices(cellNumbering, &cellNum);
117:     ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
118:     ISLocalToGlobalMappingGetSize(ltogCells, &size);
119:     ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
120:     /* Convert to positive global cell numbers */
121:     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122:     ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
123:     ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
124:     ISLocalToGlobalMappingDestroy(&ltogCells);
125:     ISDestroy(&cellNumbering);
126:   }
127:   if (adjacency) *adjacency = graph;
128:   /* Clean up */
129:   PetscSegBufferDestroy(&adjBuffer);
130:   PetscSectionDestroy(&section);
131:   PetscFree(adj);
132:   return(0);
133: }

135: /*@C
136:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

138:   Collective

140:   Input Arguments:
141: + dm - The DMPlex
142: - cellHeight - The height of mesh points to treat as cells (default should be 0)

144:   Output Arguments:
145: + numVertices - The number of local vertices in the graph, or cells in the mesh.
146: . offsets     - The offset to the adjacency list for each cell
147: - adjacency   - The adjacency list for all cells

149:   Note: This is suitable for input to a mesh partitioner like ParMetis.
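
  Sample usage (an illustrative sketch, error checking omitted); the offset and adjacency arrays are allocated here and freed by the caller with PetscFree():
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL;

    DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
    PetscFree(offsets);
    PetscFree(adjacency);
.ve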

151:   Level: advanced

153: .seealso: DMPlexCreate()
154: @*/
155: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156: {
157:   const PetscInt maxFaceCases = 30;
158:   PetscInt       numFaceCases = 0;
 159:   PetscInt       numFaceVertices[30]; /* maxFaceCases; fixed size because C89 does not allow variable-length arrays */
160:   PetscInt      *off, *adj;
161:   PetscInt      *neighborCells = NULL;
162:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

166:   /* For parallel partitioning, I think you have to communicate supports */
167:   DMGetDimension(dm, &dim);
168:   cellDim = dim - cellHeight;
169:   DMPlexGetDepth(dm, &depth);
170:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
171:   if (cEnd - cStart == 0) {
172:     if (numVertices) *numVertices = 0;
173:     if (offsets)   *offsets   = NULL;
174:     if (adjacency) *adjacency = NULL;
175:     return(0);
176:   }
177:   numCells  = cEnd - cStart;
178:   faceDepth = depth - cellHeight;
179:   if (dim == depth) {
180:     PetscInt f, fStart, fEnd;

182:     PetscCalloc1(numCells+1, &off);
183:     /* Count neighboring cells */
184:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
185:     for (f = fStart; f < fEnd; ++f) {
186:       const PetscInt *support;
187:       PetscInt        supportSize;
188:       DMPlexGetSupportSize(dm, f, &supportSize);
189:       DMPlexGetSupport(dm, f, &support);
190:       if (supportSize == 2) {
191:         PetscInt numChildren;

193:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
194:         if (!numChildren) {
195:           ++off[support[0]-cStart+1];
196:           ++off[support[1]-cStart+1];
197:         }
198:       }
199:     }
200:     /* Prefix sum */
201:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202:     if (adjacency) {
203:       PetscInt *tmp;

205:       PetscMalloc1(off[numCells], &adj);
206:       PetscMalloc1(numCells+1, &tmp);
207:       PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
208:       /* Get neighboring cells */
209:       for (f = fStart; f < fEnd; ++f) {
210:         const PetscInt *support;
211:         PetscInt        supportSize;
212:         DMPlexGetSupportSize(dm, f, &supportSize);
213:         DMPlexGetSupport(dm, f, &support);
214:         if (supportSize == 2) {
215:           PetscInt numChildren;

217:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
218:           if (!numChildren) {
219:             adj[tmp[support[0]-cStart]++] = support[1];
220:             adj[tmp[support[1]-cStart]++] = support[0];
221:           }
222:         }
223:       }
224: #if defined(PETSC_USE_DEBUG)
 225:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
226: #endif
227:       PetscFree(tmp);
228:     }
229:     if (numVertices) *numVertices = numCells;
230:     if (offsets)   *offsets   = off;
231:     if (adjacency) *adjacency = adj;
232:     return(0);
233:   }
234:   /* Setup face recognition */
235:   if (faceDepth == 1) {
236:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

238:     for (c = cStart; c < cEnd; ++c) {
239:       PetscInt corners;

241:       DMPlexGetConeSize(dm, c, &corners);
242:       if (!cornersSeen[corners]) {
243:         PetscInt nFV;

245:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246:         cornersSeen[corners] = 1;

248:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

250:         numFaceVertices[numFaceCases++] = nFV;
251:       }
252:     }
253:   }
254:   PetscCalloc1(numCells+1, &off);
255:   /* Count neighboring cells */
256:   for (cell = cStart; cell < cEnd; ++cell) {
257:     PetscInt numNeighbors = PETSC_DETERMINE, n;

259:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
260:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261:     for (n = 0; n < numNeighbors; ++n) {
262:       PetscInt        cellPair[2];
263:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264:       PetscInt        meetSize = 0;
265:       const PetscInt *meet    = NULL;

267:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268:       if (cellPair[0] == cellPair[1]) continue;
269:       if (!found) {
270:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
271:         if (meetSize) {
272:           PetscInt f;

274:           for (f = 0; f < numFaceCases; ++f) {
275:             if (numFaceVertices[f] == meetSize) {
276:               found = PETSC_TRUE;
277:               break;
278:             }
279:           }
280:         }
281:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
282:       }
283:       if (found) ++off[cell-cStart+1];
284:     }
285:   }
286:   /* Prefix sum */
287:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

289:   if (adjacency) {
290:     PetscMalloc1(off[numCells], &adj);
291:     /* Get neighboring cells */
292:     for (cell = cStart; cell < cEnd; ++cell) {
293:       PetscInt numNeighbors = PETSC_DETERMINE, n;
294:       PetscInt cellOffset   = 0;

296:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
297:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298:       for (n = 0; n < numNeighbors; ++n) {
299:         PetscInt        cellPair[2];
300:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301:         PetscInt        meetSize = 0;
302:         const PetscInt *meet    = NULL;

304:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305:         if (cellPair[0] == cellPair[1]) continue;
306:         if (!found) {
307:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
308:           if (meetSize) {
309:             PetscInt f;

311:             for (f = 0; f < numFaceCases; ++f) {
312:               if (numFaceVertices[f] == meetSize) {
313:                 found = PETSC_TRUE;
314:                 break;
315:               }
316:             }
317:           }
318:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
319:         }
320:         if (found) {
321:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322:           ++cellOffset;
323:         }
324:       }
325:     }
326:   }
327:   PetscFree(neighborCells);
328:   if (numVertices) *numVertices = numCells;
329:   if (offsets)   *offsets   = off;
330:   if (adjacency) *adjacency = adj;
331:   return(0);
332: }

334: /*@C
335:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

337:   Not Collective

339:   Input Parameters:
340: + name        - The name of a new user-defined creation routine
341: - create_func - The creation routine itself

343:   Notes:
344:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

346:   Sample usage:
347: .vb
348:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349: .ve

351:   Then, your PetscPartitioner type can be chosen with the procedural interface via
352: .vb
353:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354:     PetscPartitionerSetType(PetscPartitioner, "my_part");
355: .ve
356:    or at runtime via the option
357: .vb
358:     -petscpartitioner_type my_part
359: .ve

361:   Level: advanced

363: .keywords: PetscPartitioner, register
364: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

366: @*/
367: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368: {

372:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
373:   return(0);
374: }

376: /*@C
377:   PetscPartitionerSetType - Builds a particular PetscPartitioner

379:   Collective on PetscPartitioner

381:   Input Parameters:
382: + part - The PetscPartitioner object
383: - name - The kind of partitioner

385:   Options Database Key:
386: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
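
  Sample usage (an illustrative sketch; PETSCPARTITIONERPARMETIS is only available when PETSc is configured with ParMetis):
.vb
    PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
.ve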

388:   Level: intermediate

390: .keywords: PetscPartitioner, set, type
391: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392: @*/
393: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394: {
395:   PetscErrorCode (*r)(PetscPartitioner);
396:   PetscBool      match;

401:   PetscObjectTypeCompare((PetscObject) part, name, &match);
402:   if (match) return(0);

404:   PetscPartitionerRegisterAll();
405:   PetscFunctionListFind(PetscPartitionerList, name, &r);
406:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

408:   if (part->ops->destroy) {
409:     (*part->ops->destroy)(part);
410:     part->ops->destroy = NULL;
411:   }
412:   (*r)(part);
413:   PetscObjectChangeTypeName((PetscObject) part, name);
414:   return(0);
415: }

417: /*@C
418:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

420:   Not Collective

422:   Input Parameter:
423: . part - The PetscPartitioner

425:   Output Parameter:
426: . name - The PetscPartitioner type name

428:   Level: intermediate

430: .keywords: PetscPartitioner, get, type, name
431: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432: @*/
433: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434: {

440:   PetscPartitionerRegisterAll();
441:   *name = ((PetscObject) part)->type_name;
442:   return(0);
443: }

445: /*@C
446:   PetscPartitionerView - Views a PetscPartitioner

448:   Collective on PetscPartitioner

 450:   Input Parameters:
451: + part - the PetscPartitioner object to view
452: - v    - the viewer

454:   Level: developer

456: .seealso: PetscPartitionerDestroy()
457: @*/
458: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459: {

464:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
465:   if (part->ops->view) {(*part->ops->view)(part, v);}
466:   return(0);
467: }

469: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470: {
472:   if (!currentType) {
473: #if defined(PETSC_HAVE_CHACO)
474:     *defaultType = PETSCPARTITIONERCHACO;
475: #elif defined(PETSC_HAVE_PARMETIS)
476:     *defaultType = PETSCPARTITIONERPARMETIS;
477: #else
478:     *defaultType = PETSCPARTITIONERSIMPLE;
479: #endif
480:   } else {
481:     *defaultType = currentType;
482:   }
483:   return(0);
484: }

486: PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
487: {
488:   const char    *defaultType;
489:   char           name[256];
490:   PetscBool      flg;

495:   PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
496:   PetscPartitionerRegisterAll();

498:   PetscObjectOptionsBegin((PetscObject) part);
499:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);
500:   if (flg) {
501:     PetscPartitionerSetType(part, name);
502:   } else if (!((PetscObject) part)->type_name) {
503:     PetscPartitionerSetType(part, defaultType);
504:   }
505:   PetscOptionsEnd();
506:   return(0);
507: }

509: /*@
510:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

512:   Collective on PetscPartitioner

514:   Input Parameter:
515: . part - the PetscPartitioner object to set options for
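
  Typical options (an illustrative sketch; both options are processed by this routine, the first through PetscPartitionerSetTypeFromOptions_Internal()):
.vb
    -petscpartitioner_type parmetis -petscpartitioner_view
.ve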

517:   Level: developer

519: .seealso: PetscPartitionerView()
520: @*/
521: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
522: {

527:   PetscPartitionerSetTypeFromOptions_Internal(part);

529:   PetscObjectOptionsBegin((PetscObject) part);
530:   if (part->ops->setfromoptions) {(*part->ops->setfromoptions)(PetscOptionsObject,part);}
531:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
532:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
533:   PetscOptionsEnd();
534:   PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
535:   return(0);
536: }

538: /*@C
539:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

541:   Collective on PetscPartitioner

543:   Input Parameter:
544: . part - the PetscPartitioner object to setup

546:   Level: developer

548: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
549: @*/
550: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
551: {

556:   if (part->ops->setup) {(*part->ops->setup)(part);}
557:   return(0);
558: }

560: /*@
561:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

563:   Collective on PetscPartitioner

565:   Input Parameter:
566: . part - the PetscPartitioner object to destroy

568:   Level: developer

570: .seealso: PetscPartitionerView()
571: @*/
572: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
573: {

577:   if (!*part) return(0);

580:   if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
581:   ((PetscObject) (*part))->refct = 0;

583:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
584:   PetscHeaderDestroy(part);
585:   return(0);
586: }

588: /*@
589:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

591:   Collective on MPI_Comm

593:   Input Parameter:
594: . comm - The communicator for the PetscPartitioner object

596:   Output Parameter:
597: . part - The PetscPartitioner object
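
  Sample usage (an illustrative sketch, error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerSetFromOptions(part);
    PetscPartitionerDestroy(&part);
.ve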

599:   Level: beginner

601: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
602: @*/
603: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
604: {
605:   PetscPartitioner p;
606:   const char       *partitionerType = NULL;
607:   PetscErrorCode   ierr;

611:   *part = NULL;
612:   DMInitializePackage();

614:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
615:   PetscPartitionerGetDefaultType(NULL,&partitionerType);
616:   PetscPartitionerSetType(p,partitionerType);

618:   *part = p;
619:   return(0);
620: }

622: /*@
623:   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

625:   Collective on DM

627:   Input Parameters:
628: + part    - The PetscPartitioner
629: - dm      - The mesh DM

631:   Output Parameters:
632: + partSection     - The PetscSection giving the division of points by partition
633: - partition       - The list of points by partition

635:   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
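
  Calling sketch (illustrative only, error checking omitted): the caller creates the PetscSection that receives the partition sizes and destroys both outputs when done,
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve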

637:   Level: developer

 639: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
640: @*/
641: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
642: {
643:   PetscMPIInt    size;

651:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
652:   if (size == 1) {
653:     PetscInt *points;
654:     PetscInt  cStart, cEnd, c;

656:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
657:     PetscSectionSetChart(partSection, 0, size);
658:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
659:     PetscSectionSetUp(partSection);
660:     PetscMalloc1(cEnd-cStart, &points);
661:     for (c = cStart; c < cEnd; ++c) points[c] = c;
662:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
663:     return(0);
664:   }
665:   if (part->height == 0) {
666:     PetscInt numVertices;
667:     PetscInt *start     = NULL;
668:     PetscInt *adjacency = NULL;
669:     IS       globalNumbering;

671:     DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
672:     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
673:     (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
674:     PetscFree(start);
675:     PetscFree(adjacency);
676:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
677:       const PetscInt *globalNum;
678:       const PetscInt *partIdx;
679:       PetscInt *map, cStart, cEnd;
680:       PetscInt *adjusted, i, localSize, offset;
681:       IS    newPartition;

683:       ISGetLocalSize(*partition,&localSize);
684:       PetscMalloc1(localSize,&adjusted);
685:       ISGetIndices(globalNumbering,&globalNum);
686:       ISGetIndices(*partition,&partIdx);
687:       PetscMalloc1(localSize,&map);
688:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
689:       for (i = cStart, offset = 0; i < cEnd; i++) {
690:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
691:       }
692:       for (i = 0; i < localSize; i++) {
693:         adjusted[i] = map[partIdx[i]];
694:       }
695:       PetscFree(map);
696:       ISRestoreIndices(*partition,&partIdx);
697:       ISRestoreIndices(globalNumbering,&globalNum);
698:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
699:       ISDestroy(&globalNumbering);
700:       ISDestroy(partition);
701:       *partition = newPartition;
702:     }
703:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
704:   return(0);

706: }

708: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
709: {
710:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
711:   PetscErrorCode          ierr;

714:   PetscSectionDestroy(&p->section);
715:   ISDestroy(&p->partition);
716:   PetscFree(p);
717:   return(0);
718: }

720: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
721: {
722:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
723:   PetscViewerFormat       format;
724:   PetscErrorCode          ierr;

727:   PetscViewerGetFormat(viewer, &format);
728:   PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
729:   if (p->random) {
730:     PetscViewerASCIIPushTab(viewer);
731:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
732:     PetscViewerASCIIPopTab(viewer);
733:   }
734:   return(0);
735: }

737: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
738: {
739:   PetscBool      iascii;

745:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
746:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
747:   return(0);
748: }

750: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
751: {
752:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
753:   PetscErrorCode          ierr;

756:   PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");
757:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
758:   PetscOptionsTail();
759:   return(0);
760: }

762: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
763: {
764:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
765:   PetscInt                np;
766:   PetscErrorCode          ierr;

769:   if (p->random) {
770:     PetscRandom r;
771:     PetscInt   *sizes, *points, v, p;
772:     PetscMPIInt rank;

774:     MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
775:     PetscRandomCreate(PETSC_COMM_SELF, &r);
776:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
777:     PetscRandomSetFromOptions(r);
778:     PetscCalloc2(nparts, &sizes, numVertices, &points);
779:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
780:     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
781:     for (v = numVertices-1; v > 0; --v) {
782:       PetscReal val;
783:       PetscInt  w, tmp;

785:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
786:       PetscRandomGetValueReal(r, &val);
787:       w    = PetscFloorReal(val);
788:       tmp       = points[v];
789:       points[v] = points[w];
790:       points[w] = tmp;
791:     }
792:     PetscRandomDestroy(&r);
793:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
794:     PetscFree2(sizes, points);
795:   }
796:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
797:   PetscSectionGetChart(p->section, NULL, &np);
798:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
799:   ISGetLocalSize(p->partition, &np);
800:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
801:   PetscSectionCopy(p->section, partSection);
802:   *partition = p->partition;
803:   PetscObjectReference((PetscObject) p->partition);
804:   return(0);
805: }

807: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
808: {
810:   part->ops->view           = PetscPartitionerView_Shell;
811:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
812:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
813:   part->ops->partition      = PetscPartitionerPartition_Shell;
814:   return(0);
815: }

817: /*MC
818:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

820:   Level: intermediate

822: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
823: M*/

825: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826: {
827:   PetscPartitioner_Shell *p;
828:   PetscErrorCode          ierr;

832:   PetscNewLog(part, &p);
833:   part->data = p;

835:   PetscPartitionerInitialize_Shell(part);
836:   p->random = PETSC_FALSE;
837:   return(0);
838: }

840: /*@C
 841:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

843:   Collective on Part

845:   Input Parameters:
846: + part     - The PetscPartitioner
847: . size - The number of partitions
848: . sizes    - array of size size (or NULL) providing the number of points in each partition
849: - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

851:   Level: developer

853:   Notes:

855:     It is safe to free the sizes and points arrays after use in this routine.
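
    Sample usage (an illustrative sketch): on a mesh with four local cells split between two partitions, assign cells 0 and 1 to partition 0 and cells 2 and 3 to partition 1,
.vb
    PetscInt sizes[2]  = {2, 2};
    PetscInt points[4] = {0, 1, 2, 3};

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve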

 857: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
858: @*/
859: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
860: {
861:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
862:   PetscInt                proc, numPoints;
863:   PetscErrorCode          ierr;

869:   PetscSectionDestroy(&p->section);
870:   ISDestroy(&p->partition);
871:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
872:   PetscSectionSetChart(p->section, 0, size);
873:   if (sizes) {
874:     for (proc = 0; proc < size; ++proc) {
875:       PetscSectionSetDof(p->section, proc, sizes[proc]);
876:     }
877:   }
878:   PetscSectionSetUp(p->section);
879:   PetscSectionGetStorageSize(p->section, &numPoints);
880:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
881:   return(0);
882: }

884: /*@
885:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

887:   Collective on Part

889:   Input Parameters:
890: + part   - The PetscPartitioner
891: - random - The flag to use a random partition
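
  The same behavior can be requested from the options database (an equivalent sketch; the option is handled in PetscPartitionerSetFromOptions_Shell()):
.vb
    -petscpartitioner_type shell -petscpartitioner_shell_random
.ve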

893:   Level: intermediate

 895: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
896: @*/
897: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
898: {
899:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

903:   p->random = random;
904:   return(0);
905: }

907: /*@
908:   PetscPartitionerShellGetRandom - get the flag to use a random partition

910:   Collective on Part

912:   Input Parameter:
913: . part   - The PetscPartitioner

 915:   Output Parameter:
916: . random - The flag to use a random partition

918:   Level: intermediate

 920: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
921: @*/
922: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
923: {
924:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

929:   *random = p->random;
930:   return(0);
931: }

933: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
934: {
935:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
936:   PetscErrorCode          ierr;

939:   PetscFree(p);
940:   return(0);
941: }

943: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
944: {
945:   PetscViewerFormat format;
946:   PetscErrorCode    ierr;

949:   PetscViewerGetFormat(viewer, &format);
950:   PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
951:   return(0);
952: }

954: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
955: {
956:   PetscBool      iascii;

962:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
963:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
964:   return(0);
965: }

967: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
968: {
969:   MPI_Comm       comm;
970:   PetscInt       np;
971:   PetscMPIInt    size;

975:   comm = PetscObjectComm((PetscObject)dm);
976:   MPI_Comm_size(comm,&size);
977:   PetscSectionSetChart(partSection, 0, nparts);
978:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
979:   if (size == 1) {
980:     for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
981:   }
982:   else {
983:     PetscMPIInt rank;
984:     PetscInt nvGlobal, *offsets, myFirst, myLast;

986:     PetscMalloc1(size+1,&offsets);
987:     offsets[0] = 0;
988:     MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
989:     for (np = 2; np <= size; np++) {
990:       offsets[np] += offsets[np-1];
991:     }
992:     nvGlobal = offsets[size];
993:     MPI_Comm_rank(comm,&rank);
994:     myFirst = offsets[rank];
995:     myLast  = offsets[rank + 1] - 1;
996:     PetscFree(offsets);
997:     if (numVertices) {
998:       PetscInt firstPart = 0, firstLargePart = 0;
999:       PetscInt lastPart = 0, lastLargePart = 0;
1000:       PetscInt rem = nvGlobal % nparts;
1001:       PetscInt pSmall = nvGlobal/nparts;
1002:       PetscInt pBig = nvGlobal/nparts + 1;


1005:       if (rem) {
1006:         firstLargePart = myFirst / pBig;
1007:         lastLargePart  = myLast  / pBig;

1009:         if (firstLargePart < rem) {
1010:           firstPart = firstLargePart;
1011:         }
1012:         else {
1013:           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1014:         }
1015:         if (lastLargePart < rem) {
1016:           lastPart = lastLargePart;
1017:         }
1018:         else {
1019:           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1020:         }
1021:       }
1022:       else {
1023:         firstPart = myFirst / (nvGlobal/nparts);
1024:         lastPart  = myLast  / (nvGlobal/nparts);
1025:       }

1027:       for (np = firstPart; np <= lastPart; np++) {
1028:         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1029:         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1031:         PartStart = PetscMax(PartStart,myFirst);
1032:         PartEnd   = PetscMin(PartEnd,myLast+1);
1033:         PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1034:       }
1035:     }
1036:   }
1037:   PetscSectionSetUp(partSection);
1038:   return(0);
1039: }

1041: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1042: {
1044:   part->ops->view      = PetscPartitionerView_Simple;
1045:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1046:   part->ops->partition = PetscPartitionerPartition_Simple;
1047:   return(0);
1048: }

1050: /*MC
1051:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1053:   Level: intermediate

1055: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1056: M*/

1058: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1059: {
1060:   PetscPartitioner_Simple *p;
1061:   PetscErrorCode           ierr;

1065:   PetscNewLog(part, &p);
1066:   part->data = p;

1068:   PetscPartitionerInitialize_Simple(part);
1069:   return(0);
1070: }

1072: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1073: {
1074:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1075:   PetscErrorCode          ierr;

1078:   PetscFree(p);
1079:   return(0);
1080: }

1082: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1083: {
1084:   PetscViewerFormat format;
1085:   PetscErrorCode    ierr;

1088:   PetscViewerGetFormat(viewer, &format);
1089:   PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
1090:   return(0);
1091: }

1093: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1094: {
1095:   PetscBool      iascii;

1101:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1102:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1103:   return(0);
1104: }

1106: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1107: {
1108:   PetscInt       np;

1112:   PetscSectionSetChart(partSection, 0, nparts);
1113:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1114:   PetscSectionSetDof(partSection,0,numVertices);
1115:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1116:   PetscSectionSetUp(partSection);
1117:   return(0);
1118: }

1120: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1121: {
1123:   part->ops->view      = PetscPartitionerView_Gather;
1124:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1125:   part->ops->partition = PetscPartitionerPartition_Gather;
1126:   return(0);
1127: }

1129: /*MC
1130:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1132:   Level: intermediate

1134: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1135: M*/

1137: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1138: {
1139:   PetscPartitioner_Gather *p;
1140:   PetscErrorCode           ierr;

1144:   PetscNewLog(part, &p);
1145:   part->data = p;

1147:   PetscPartitionerInitialize_Gather(part);
1148:   return(0);
1149: }


1152: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1153: {
1154:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1155:   PetscErrorCode          ierr;

1158:   PetscFree(p);
1159:   return(0);
1160: }

1162: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1163: {
1164:   PetscViewerFormat format;
1165:   PetscErrorCode    ierr;

1168:   PetscViewerGetFormat(viewer, &format);
1169:   PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1170:   return(0);
1171: }

1173: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1174: {
1175:   PetscBool      iascii;

1181:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1182:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1183:   return(0);
1184: }

1186: #if defined(PETSC_HAVE_CHACO)
1187: #if defined(PETSC_HAVE_UNISTD_H)
1188: #include <unistd.h>
1189: #endif
1190: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1191: #include <chaco.h>
1192: #else
1193: /* Older versions of Chaco do not have an include file */
1194: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1195:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1196:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1197:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1198:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1199: #endif
1200: extern int FREE_GRAPH;
1201: #endif

1203: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1204: {
1205: #if defined(PETSC_HAVE_CHACO)
1206:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1207:   MPI_Comm       comm;
1208:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1209:   int           *vwgts          = NULL;   /* weights for all vertices */
1210:   float         *ewgts          = NULL;   /* weights for all edges */
1211:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1212:   char          *outassignname  = NULL;   /*  name of assignment output file */
1213:   char          *outfilename    = NULL;   /* output file name */
1214:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1215:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1216:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1217:   double        *goal          = NULL;    /* desired set sizes for each set */
1218:   int            global_method = 1;       /* global partitioning algorithm */
1219:   int            local_method  = 1;       /* local partitioning algorithm */
1220:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1221:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1222:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1223:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1224:   long           seed          = 123636512; /* for random graph mutations */
1225: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1226:   int           *assignment;              /* Output partition */
1227: #else
1228:   short int     *assignment;              /* Output partition */
1229: #endif
1230:   int            fd_stdout, fd_pipe[2];
1231:   PetscInt      *points;
1232:   int            i, v, p;

1236:   PetscObjectGetComm((PetscObject)dm,&comm);
1237: #if defined (PETSC_USE_DEBUG)
1238:   {
1239:     int ival,isum;
1240:     PetscBool distributed;

1242:     ival = (numVertices > 0);
1243:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1244:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1245:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1246:   }
1247: #endif
1248:   if (!numVertices) {
1249:     PetscSectionSetChart(partSection, 0, nparts);
1250:     PetscSectionSetUp(partSection);
1251:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1252:     return(0);
1253:   }
1254:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1255:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

1257:   if (global_method == INERTIAL_METHOD) {
1258:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1259:     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1260:   }
1261:   mesh_dims[0] = nparts;
1262:   mesh_dims[1] = 1;
1263:   mesh_dims[2] = 1;
1264:   PetscMalloc1(nvtxs, &assignment);
1265:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1266:   /* TODO: check error codes for UNIX calls */
1267: #if defined(PETSC_HAVE_UNISTD_H)
1268:   {
1269:     int piperet;
1270:     piperet = pipe(fd_pipe);
1271:     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1272:     fd_stdout = dup(1);
1273:     close(1);
1274:     dup2(fd_pipe[1], 1);
1275:   }
1276: #endif
1277:   interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1278:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1279:                    vmax, ndims, eigtol, seed);
1280: #if defined(PETSC_HAVE_UNISTD_H)
1281:   {
1282:     char msgLog[10000];
1283:     int  count;

1285:     fflush(stdout);
1286:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1287:     if (count < 0) count = 0;
1288:     msgLog[count] = 0;
1289:     close(1);
1290:     dup2(fd_stdout, 1);
1291:     close(fd_stdout);
1292:     close(fd_pipe[0]);
1293:     close(fd_pipe[1]);
1294:     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1295:   }
1296: #else
1297:   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1298: #endif
1299:   /* Convert to PetscSection+IS */
1300:   PetscSectionSetChart(partSection, 0, nparts);
1301:   for (v = 0; v < nvtxs; ++v) {
1302:     PetscSectionAddDof(partSection, assignment[v], 1);
1303:   }
1304:   PetscSectionSetUp(partSection);
1305:   PetscMalloc1(nvtxs, &points);
1306:   for (p = 0, i = 0; p < nparts; ++p) {
1307:     for (v = 0; v < nvtxs; ++v) {
1308:       if (assignment[v] == p) points[i++] = v;
1309:     }
1310:   }
1311:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1312:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1313:   if (global_method == INERTIAL_METHOD) {
1314:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1315:   }
1316:   PetscFree(assignment);
1317:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1318:   return(0);
1319: #else
1320:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1321: #endif
1322: }

1324: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1325: {
1327:   part->ops->view      = PetscPartitionerView_Chaco;
1328:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1329:   part->ops->partition = PetscPartitionerPartition_Chaco;
1330:   return(0);
1331: }

1333: /*MC
1334:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1336:   Level: intermediate

1338: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1339: M*/

1341: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1342: {
1343:   PetscPartitioner_Chaco *p;
1344:   PetscErrorCode          ierr;

1348:   PetscNewLog(part, &p);
1349:   part->data = p;

1351:   PetscPartitionerInitialize_Chaco(part);
1352:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1353:   return(0);
1354: }

1356: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1357: {
1358:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1359:   PetscErrorCode             ierr;

1362:   PetscFree(p);
1363:   return(0);
1364: }

1366: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1367: {
1368:   PetscViewerFormat format;
1369:   PetscErrorCode    ierr;

1372:   PetscViewerGetFormat(viewer, &format);
1373:   PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1374:   return(0);
1375: }

1377: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1378: {
1379:   PetscBool      iascii;

1385:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1386:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1387:   return(0);
1388: }

1390: #if defined(PETSC_HAVE_PARMETIS)
1391: #include <parmetis.h>
1392: #endif

1394: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1395: {
1396: #if defined(PETSC_HAVE_PARMETIS)
1397:   MPI_Comm       comm;
1398:   PetscSection   section;
1399:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1400:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1401:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1402:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1403:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1404:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1405:   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1406:   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1407:   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1408:   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1409:   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1410:   PetscInt       options[5];               /* Options */
1411:   /* Outputs */
1412:   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1413:   PetscInt      *assignment, *points;
1414:   PetscMPIInt    rank, p, v, i;

1418:   PetscObjectGetComm((PetscObject) part, &comm);
1419:   MPI_Comm_rank(comm, &rank);
1420:   options[0] = 0; /* Use all defaults */
1421:   /* Calculate vertex distribution */
1422:   PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1423:   vtxdist[0] = 0;
1424:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1425:   for (p = 2; p <= nparts; ++p) {
1426:     vtxdist[p] += vtxdist[p-1];
1427:   }
1428:   /* Calculate weights */
1429:   for (p = 0; p < nparts; ++p) {
1430:     tpwgts[p] = 1.0/nparts;
1431:   }
1432:   ubvec[0] = 1.05;
1433:   /* Weight cells by dofs on cell by default */
1434:   DMGetDefaultSection(dm, &section);
1435:   if (section) {
1436:     PetscInt cStart, cEnd, dof;

1438:     DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1439:     for (v = cStart; v < cEnd; ++v) {
1440:       PetscSectionGetDof(section, v, &dof);
1441:       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1442:       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1443:       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1444:     }
1445:   } else {
1446:     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1447:   }

1449:   if (nparts == 1) {
1450:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1451:   } else {
1452:     if (vtxdist[1] == vtxdist[nparts]) {
1453:       if (!rank) {
1454:         PetscStackPush("METIS_PartGraphKway");
1455:         METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1456:         PetscStackPop;
1457:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1458:       }
1459:     } else {
1460:       PetscStackPush("ParMETIS_V3_PartKway");
1461:       ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1462:       PetscStackPop;
1463:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1464:     }
1465:   }
1466:   /* Convert to PetscSection+IS */
1467:   PetscSectionSetChart(partSection, 0, nparts);
1468:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1469:   PetscSectionSetUp(partSection);
1470:   PetscMalloc1(nvtxs, &points);
1471:   for (p = 0, i = 0; p < nparts; ++p) {
1472:     for (v = 0; v < nvtxs; ++v) {
1473:       if (assignment[v] == p) points[i++] = v;
1474:     }
1475:   }
1476:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1477:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1478:   PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1479:   return(0);
1480: #else
1481:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1482: #endif
1483: }

1485: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1486: {
1488:   part->ops->view      = PetscPartitionerView_ParMetis;
1489:   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1490:   part->ops->partition = PetscPartitionerPartition_ParMetis;
1491:   return(0);
1492: }

1494: /*MC
1495:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library

1497:   Level: intermediate

1499: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1500: M*/

1502: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1503: {
1504:   PetscPartitioner_ParMetis *p;
1505:   PetscErrorCode          ierr;

1509:   PetscNewLog(part, &p);
1510:   part->data = p;

1512:   PetscPartitionerInitialize_ParMetis(part);
1513:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1514:   return(0);
1515: }

1517: /*@
1518:   DMPlexGetPartitioner - Get the mesh partitioner

1520:   Not collective

1522:   Input Parameter:
1523: . dm - The DM

1525:   Output Parameter:
1526: . part - The PetscPartitioner

1528:   Level: developer

1530:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
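
  Sample usage (an illustrative sketch, error checking omitted): configure the partitioner held by the DM in place,
.vb
    PetscPartitioner part;

    DMPlexGetPartitioner(dm, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
.ve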

 1532: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1533: @*/
1534: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1535: {
1536:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1541:   *part = mesh->partitioner;
1542:   return(0);
1543: }

1545: /*@
1546:   DMPlexSetPartitioner - Set the mesh partitioner

 1548:   Logically Collective on dm and part

1550:   Input Parameters:
1551: + dm - The DM
1552: - part - The partitioner

1554:   Level: developer

1556:   Note: Any existing PetscPartitioner will be destroyed.
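
  Sample usage (an illustrative sketch, error checking omitted): the DM takes its own reference, so the local reference may be dropped afterwards,
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
    DMPlexSetPartitioner(dm, part);
    PetscPartitionerDestroy(&part);
.ve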

 1558: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1559: @*/
1560: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1561: {
1562:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1568:   PetscObjectReference((PetscObject)part);
1569:   PetscPartitionerDestroy(&mesh->partitioner);
1570:   mesh->partitioner = part;
1571:   return(0);
1572: }

1574: static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1575: {

1579:   if (up) {
1580:     PetscInt parent;

1582:     DMPlexGetTreeParent(dm,point,&parent,NULL);
1583:     if (parent != point) {
1584:       PetscInt closureSize, *closure = NULL, i;

1586:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1587:       for (i = 0; i < closureSize; i++) {
1588:         PetscInt cpoint = closure[2*i];

1590:         DMLabelSetValue(label,cpoint,rank);
1591:         DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);
1592:       }
1593:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1594:     }
1595:   }
1596:   if (down) {
1597:     PetscInt numChildren;
1598:     const PetscInt *children;

1600:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1601:     if (numChildren) {
1602:       PetscInt i;

1604:       for (i = 0; i < numChildren; i++) {
1605:         PetscInt cpoint = children[i];

1607:         DMLabelSetValue(label,cpoint,rank);
1608:         DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);
1609:       }
1610:     }
1611:   }
1612:   return(0);
1613: }

1615: /*@
1616:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

1618:   Input Parameters:
1619: + dm     - The DM
1620: - label  - DMLabel assigning ranks to remote roots

1622:   Level: developer

1624: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1625: @*/
1626: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1627: {
1628:   IS              rankIS,   pointIS;
1629:   const PetscInt *ranks,   *points;
1630:   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1631:   PetscInt       *closure = NULL;
1632:   PetscErrorCode  ierr;

1635:   DMLabelGetValueIS(label, &rankIS);
1636:   ISGetLocalSize(rankIS, &numRanks);
1637:   ISGetIndices(rankIS, &ranks);
1638:   for (r = 0; r < numRanks; ++r) {
1639:     const PetscInt rank = ranks[r];

1641:     DMLabelGetStratumIS(label, rank, &pointIS);
1642:     ISGetLocalSize(pointIS, &numPoints);
1643:     ISGetIndices(pointIS, &points);
1644:     for (p = 0; p < numPoints; ++p) {
1645:       DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1646:       for (c = 0; c < closureSize*2; c += 2) {
1647:         DMLabelSetValue(label, closure[c], rank);
1648:         DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);
1649:       }
1650:     }
1651:     ISRestoreIndices(pointIS, &points);
1652:     ISDestroy(&pointIS);
1653:   }
1654:   if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);}
1655:   ISRestoreIndices(rankIS, &ranks);
1656:   ISDestroy(&rankIS);
1657:   return(0);
1658: }
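
Editor's illustration of typical use (the label name and the cell-to-rank rule are placeholders): label the cells destined for each rank, then close the label so each cell's faces, edges and vertices are marked as well.

  static PetscErrorCode ExampleLabelAndClose(DM dm)
  {
    DMLabel     label;
    PetscInt    cStart, cEnd, c;
    PetscMPIInt size;

    MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);
    DMCreateLabel(dm, "example_partition");   /* placeholder label name */
    DMGetLabel(dm, "example_partition", &label);
    DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
    for (c = cStart; c < cEnd; ++c) {DMLabelSetValue(label, c, c % size);} /* toy cell-to-rank assignment */
    DMPlexPartitionLabelClosure(dm, label);   /* add the closure of every labelled cell */
    return(0);
  }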

1660: /*@
1661:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

1663:   Input Parameters:
1664: + dm     - The DM
1665: - label  - DMLabel assigning ranks to remote roots

1667:   Level: developer

1669: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1670: @*/
1671: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1672: {
1673:   IS              rankIS,   pointIS;
1674:   const PetscInt *ranks,   *points;
1675:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1676:   PetscInt       *adj = NULL;
1677:   PetscErrorCode  ierr;

1680:   DMLabelGetValueIS(label, &rankIS);
1681:   ISGetLocalSize(rankIS, &numRanks);
1682:   ISGetIndices(rankIS, &ranks);
1683:   for (r = 0; r < numRanks; ++r) {
1684:     const PetscInt rank = ranks[r];

1686:     DMLabelGetStratumIS(label, rank, &pointIS);
1687:     ISGetLocalSize(pointIS, &numPoints);
1688:     ISGetIndices(pointIS, &points);
1689:     for (p = 0; p < numPoints; ++p) {
1690:       adjSize = PETSC_DETERMINE;
1691:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
1692:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
1693:     }
1694:     ISRestoreIndices(pointIS, &points);
1695:     ISDestroy(&pointIS);
1696:   }
1697:   ISRestoreIndices(rankIS, &ranks);
1698:   ISDestroy(&rankIS);
1699:   PetscFree(adj);
1700:   return(0);
1701: }
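
For instance (editor's illustration, assuming label already marks the points assigned to each rank), one extra overlap layer can be grown by adding the adjacent points and then re-closing the label:

    DMPlexPartitionLabelAdjacency(dm, label); /* add the immediate neighbours of every labelled point */
    DMPlexPartitionLabelClosure(dm, label);   /* re-close so the enlarged region is again closed under closure */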

1703: /*@
1704:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

1706:   Input Parameters:
1707: + dm     - The DM
1708: - label  - DMLabel assigning ranks to remote roots

1710:   Level: developer

1712:   Note: This is required when generating multi-level overlaps to capture
1713:   overlap points from non-neighbouring partitions.

1715: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1716: @*/
1717: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1718: {
1719:   MPI_Comm        comm;
1720:   PetscMPIInt     rank;
1721:   PetscSF         sfPoint;
1722:   DMLabel         lblRoots, lblLeaves;
1723:   IS              rankIS, pointIS;
1724:   const PetscInt *ranks;
1725:   PetscInt        numRanks, r;
1726:   PetscErrorCode  ierr;

1729:   PetscObjectGetComm((PetscObject) dm, &comm);
1730:   MPI_Comm_rank(comm, &rank);
1731:   DMGetPointSF(dm, &sfPoint);
1732:   /* Pull point contributions from remote leaves into local roots */
1733:   DMLabelGather(label, sfPoint, &lblLeaves);
1734:   DMLabelGetValueIS(lblLeaves, &rankIS);
1735:   ISGetLocalSize(rankIS, &numRanks);
1736:   ISGetIndices(rankIS, &ranks);
1737:   for (r = 0; r < numRanks; ++r) {
1738:     const PetscInt remoteRank = ranks[r];
1739:     if (remoteRank == rank) continue;
1740:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
1741:     DMLabelInsertIS(label, pointIS, remoteRank);
1742:     ISDestroy(&pointIS);
1743:   }
1744:   ISRestoreIndices(rankIS, &ranks);
1745:   ISDestroy(&rankIS);
1746:   DMLabelDestroy(&lblLeaves);
1747:   /* Push point contributions from roots into remote leaves */
1748:   DMLabelDistribute(label, sfPoint, &lblRoots);
1749:   DMLabelGetValueIS(lblRoots, &rankIS);
1750:   ISGetLocalSize(rankIS, &numRanks);
1751:   ISGetIndices(rankIS, &ranks);
1752:   for (r = 0; r < numRanks; ++r) {
1753:     const PetscInt remoteRank = ranks[r];
1754:     if (remoteRank == rank) continue;
1755:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
1756:     DMLabelInsertIS(label, pointIS, remoteRank);
1757:     ISDestroy(&pointIS);
1758:   }
1759:   ISRestoreIndices(rankIS, &ranks);
1760:   ISDestroy(&rankIS);
1761:   DMLabelDestroy(&lblRoots);
1762:   return(0);
1763: }
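
Editor's illustration of the multi-level case mentioned in the note (nlevels is an assumed parameter): each round grows the label locally and then synchronises it over the point SF so that contributions from non-neighbouring ranks are picked up.

  static PetscErrorCode ExampleGrowOverlap(DM dm, DMLabel label, PetscInt nlevels)
  {
    PetscInt l;

    for (l = 0; l < nlevels; ++l) {
      DMPlexPartitionLabelAdjacency(dm, label); /* grow the region by one layer locally */
      DMPlexPartitionLabelClosure(dm, label);   /* keep the region closed */
      DMPlexPartitionLabelPropagate(dm, label); /* exchange the newly added points across the point SF */
    }
    return(0);
  }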

1765: /*@
1766:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

1768:   Input Parameters:
1769: + dm        - The DM
1770: . rootLabel - DMLabel assigning ranks to local roots
1771: - processSF - A star forest mapping into the local index on each remote rank

1773:   Output Parameter:
1774: . leafLabel - DMLabel assigning ranks to remote roots

1776:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1777:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

1779:   Level: developer

1781: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1782: @*/
1783: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1784: {
1785:   MPI_Comm           comm;
1786:   PetscMPIInt        rank, size;
1787:   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
1788:   PetscSF            sfPoint;
1789:   PetscSFNode       *rootPoints, *leafPoints;
1790:   PetscSection       rootSection, leafSection;
1791:   const PetscSFNode *remote;
1792:   const PetscInt    *local, *neighbors;
1793:   IS                 valueIS;
1794:   PetscErrorCode     ierr;

1797:   PetscObjectGetComm((PetscObject) dm, &comm);
1798:   MPI_Comm_rank(comm, &rank);
1799:   MPI_Comm_size(comm, &size);
1800:   DMGetPointSF(dm, &sfPoint);

1802:   /* Convert to (point, rank) and use actual owners */
1803:   PetscSectionCreate(comm, &rootSection);
1804:   PetscSectionSetChart(rootSection, 0, size);
1805:   DMLabelGetValueIS(rootLabel, &valueIS);
1806:   ISGetLocalSize(valueIS, &numNeighbors);
1807:   ISGetIndices(valueIS, &neighbors);
1808:   for (n = 0; n < numNeighbors; ++n) {
1809:     PetscInt numPoints;

1811:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
1812:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
1813:   }
1814:   PetscSectionSetUp(rootSection);
1815:   PetscSectionGetStorageSize(rootSection, &ssize);
1816:   PetscMalloc1(ssize, &rootPoints);
1817:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
1818:   for (n = 0; n < numNeighbors; ++n) {
1819:     IS              pointIS;
1820:     const PetscInt *points;
1821:     PetscInt        off, numPoints, p;

1823:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
1824:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
1825:     ISGetLocalSize(pointIS, &numPoints);
1826:     ISGetIndices(pointIS, &points);
1827:     for (p = 0; p < numPoints; ++p) {
1828:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
1829:       else       {l = -1;}
1830:       if (l >= 0) {rootPoints[off+p] = remote[l];}
1831:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1832:     }
1833:     ISRestoreIndices(pointIS, &points);
1834:     ISDestroy(&pointIS);
1835:   }
1836:   ISRestoreIndices(valueIS, &neighbors);
1837:   ISDestroy(&valueIS);
1838:   /* Communicate overlap */
1839:   PetscSectionCreate(comm, &leafSection);
1840:   DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
1841:   /* Insert the received (point, rank) pairs (leafPoints) into the leaf label */
1842:   PetscSectionGetStorageSize(leafSection, &ssize);
1843:   for (p = 0; p < ssize; p++) {
1844:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
1845:   }
1846:   PetscFree(rootPoints);
1847:   PetscSectionDestroy(&rootSection);
1848:   PetscFree(leafPoints);
1849:   PetscSectionDestroy(&leafSection);
1850:   return(0);
1851: }
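
Editor's illustration of the send/receive inversion described above (the label names and the process star forest sfProcess are assumptions): rootLabel maps local points to the ranks they are sent to, and after the call leafLabel records, for each point to be received, the rank it comes from.

  static PetscErrorCode ExampleInvertSendLabel(DM dm, PetscSF sfProcess, DMLabel rootLabel)
  {
    DMLabel leafLabel;

    DMCreateLabel(dm, "example_receive");  /* placeholder label name */
    DMGetLabel(dm, "example_receive", &leafLabel);
    /* rootLabel is assumed to be filled already, e.g. via DMLabelSetValue(rootLabel, point, targetRank) */
    DMPlexPartitionLabelInvert(dm, rootLabel, sfProcess, leafLabel);
    return(0);
  }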

1853: /*@
1854:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

1856:   Input Parameters:
1857: + dm    - The DM
1858: - label - DMLabel assigning ranks to remote roots

1860:   Output Parameter:
1861: . sf    - The star forest communication context encapsulating the defined mapping

1863:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

1865:   Level: developer

1867: .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
1868: @*/
1869: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1870: {
1871:   PetscMPIInt     rank, size;
1872:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1873:   PetscSFNode    *remotePoints;
1874:   IS              remoteRootIS;
1875:   const PetscInt *remoteRoots;

1879:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
1880:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);

1882:   for (numRemote = 0, n = 0; n < size; ++n) {
1883:     DMLabelGetStratumSize(label, n, &numPoints);
1884:     numRemote += numPoints;
1885:   }
1886:   PetscMalloc1(numRemote, &remotePoints);
1887:   /* Put owned points first */
1888:   DMLabelGetStratumSize(label, rank, &numPoints);
1889:   if (numPoints > 0) {
1890:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
1891:     ISGetIndices(remoteRootIS, &remoteRoots);
1892:     for (p = 0; p < numPoints; p++) {
1893:       remotePoints[idx].index = remoteRoots[p];
1894:       remotePoints[idx].rank = rank;
1895:       idx++;
1896:     }
1897:     ISRestoreIndices(remoteRootIS, &remoteRoots);
1898:     ISDestroy(&remoteRootIS);
1899:   }
1900:   /* Now add remote points */
1901:   for (n = 0; n < size; ++n) {
1902:     DMLabelGetStratumSize(label, n, &numPoints);
1903:     if (numPoints <= 0 || n == rank) continue;
1904:     DMLabelGetStratumIS(label, n, &remoteRootIS);
1905:     ISGetIndices(remoteRootIS, &remoteRoots);
1906:     for (p = 0; p < numPoints; p++) {
1907:       remotePoints[idx].index = remoteRoots[p];
1908:       remotePoints[idx].rank = n;
1909:       idx++;
1910:     }
1911:     ISRestoreIndices(remoteRootIS, &remoteRoots);
1912:     ISDestroy(&remoteRootIS);
1913:   }
1914:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
1915:   DMPlexGetChart(dm, &pStart, &pEnd);
1916:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1917:   return(0);
1918: }
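
Editor's illustration (assuming a receive label such as the leafLabel above): the label is converted into a star forest that can drive the actual point migration, and can be inspected with PetscSFView().

  static PetscErrorCode ExampleLabelToSF(DM dm, DMLabel leafLabel)
  {
    PetscSF migrationSF;

    DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);
    PetscSFView(migrationSF, PETSC_VIEWER_STDOUT_WORLD); /* show the resulting (point, rank) mapping */
    PetscSFDestroy(&migrationSF);
    return(0);
  }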