Actual source code: plexpartition.c

petsc-master 2020-07-04
  1:  #include <petsc/private/dmpleximpl.h>
  2:  #include <petsc/private/hashseti.h>

  4: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  6: PetscFunctionList PetscPartitionerList              = NULL;
  7: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  9: PetscBool ChacoPartitionercite = PETSC_FALSE;
 10: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 11:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 12:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 13:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 14:                                "  isbn      = {0-89791-816-9},\n"
 15:                                "  pages     = {28},\n"
 16:                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
 17:                                "  publisher = {ACM Press},\n"
 18:                                "  address   = {New York},\n"
 19:                                "  year      = {1995}\n}\n";

 21: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 22: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 23:                                "  author  = {George Karypis and Vipin Kumar},\n"
 24:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 25:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 26:                                "  volume  = {48},\n"
 27:                                "  pages   = {71--85},\n"
 28:                                "  year    = {1998}\n}\n";

 30: PetscBool PTScotchPartitionercite = PETSC_FALSE;
 31: const char PTScotchPartitionerCitation[] =
 32:   "@article{PTSCOTCH,\n"
 33:   "  author  = {C. Chevalier and F. Pellegrini},\n"
 34:   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
 35:   "  journal = {Parallel Computing},\n"
 36:   "  volume  = {34},\n"
 37:   "  number  = {6},\n"
 38:   "  pages   = {318--331},\n"
 39:   "  year    = {2008},\n"
 40:   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
 41:   "}\n";


 44: PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
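
/* The cell numberings created below store a locally owned point as its global ID g and a
   ghost (leaf) copy as -(g+1); DMPlex_GlobalID() above recovers g in either case, e.g.
   DMPlex_GlobalID(42) == 42 and DMPlex_GlobalID(-43) == 42. */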

 46: static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 47: {
 48:   PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
 49:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 50:   IS             cellNumbering;
 51:   const PetscInt *cellNum;
 52:   PetscBool      useCone, useClosure;
 53:   PetscSection   section;
 54:   PetscSegBuffer adjBuffer;
 55:   PetscSF        sfPoint;
 56:   PetscInt       *adjCells = NULL, *remoteCells = NULL;
 57:   const PetscInt *local;
 58:   PetscInt       nroots, nleaves, l;
 59:   PetscMPIInt    rank;

 63:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 64:   DMGetDimension(dm, &dim);
 65:   DMPlexGetDepth(dm, &depth);
 66:   if (dim != depth) {
 67:     /* We do not handle the uninterpolated case here */
 68:     DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);
 69:     /* DMPlexCreateNeighborCSR does not make a numbering */
 70:     if (globalNumbering) {DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);}
 71:     /* Different behavior for empty graphs */
 72:     if (!*numVertices) {
 73:       PetscMalloc1(1, offsets);
 74:       (*offsets)[0] = 0;
 75:     }
 76:     /* Broken in parallel */
 77:     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
 78:     return(0);
 79:   }
 80:   DMGetPointSF(dm, &sfPoint);
 81:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 82:   /* Build adjacency graph via a section/segbuffer */
 83:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 84:   PetscSectionSetChart(section, pStart, pEnd);
 85:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 86:   /* Always use FVM adjacency to create partitioner graph */
 87:   DMGetBasicAdjacency(dm, &useCone, &useClosure);
 88:   DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);
 89:   DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);
 90:   if (globalNumbering) {
 91:     PetscObjectReference((PetscObject)cellNumbering);
 92:     *globalNumbering = cellNumbering;
 93:   }
 94:   ISGetIndices(cellNumbering, &cellNum);
 95:   /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
 96:      Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
 97:    */
 98:   PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);
 99:   if (nroots >= 0) {
100:     PetscInt fStart, fEnd, f;

102:     PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);
103:     DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);
104:     for (l = 0; l < nroots; ++l) adjCells[l] = -3;
105:     for (f = fStart; f < fEnd; ++f) {
106:       const PetscInt *support;
107:       PetscInt        supportSize;

109:       DMPlexGetSupport(dm, f, &support);
110:       DMPlexGetSupportSize(dm, f, &supportSize);
111:       if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
112:       else if (supportSize == 2) {
113:         PetscFindInt(support[0], nleaves, local, &p);
114:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
115:         PetscFindInt(support[1], nleaves, local, &p);
116:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
117:       }
118:       /* Handle non-conforming meshes */
119:       if (supportSize > 2) {
120:         PetscInt        numChildren, i;
121:         const PetscInt *children;

123:         DMPlexGetTreeChildren(dm, f, &numChildren, &children);
124:         for (i = 0; i < numChildren; ++i) {
125:           const PetscInt child = children[i];
126:           if (fStart <= child && child < fEnd) {
127:             DMPlexGetSupport(dm, child, &support);
128:             DMPlexGetSupportSize(dm, child, &supportSize);
129:             if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
130:             else if (supportSize == 2) {
131:               PetscFindInt(support[0], nleaves, local, &p);
132:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
133:               PetscFindInt(support[1], nleaves, local, &p);
134:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
135:             }
136:           }
137:         }
138:       }
139:     }
140:     for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
141:     PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);
142:     PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);
143:     PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
144:     PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
145:   }
146:   /* Combine local and global adjacencies */
147:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
148:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
149:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
150:     /* Add remote cells */
151:     if (remoteCells) {
152:       const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
153:       PetscInt       coneSize, numChildren, c, i;
154:       const PetscInt *cone, *children;

156:       DMPlexGetCone(dm, p, &cone);
157:       DMPlexGetConeSize(dm, p, &coneSize);
158:       for (c = 0; c < coneSize; ++c) {
159:         const PetscInt point = cone[c];
160:         if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
161:           PetscInt *PETSC_RESTRICT pBuf;
162:           PetscSectionAddDof(section, p, 1);
163:           PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
164:           *pBuf = remoteCells[point];
165:         }
166:         /* Handle non-conforming meshes */
167:         DMPlexGetTreeChildren(dm, point, &numChildren, &children);
168:         for (i = 0; i < numChildren; ++i) {
169:           const PetscInt child = children[i];
170:           if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
171:             PetscInt *PETSC_RESTRICT pBuf;
172:             PetscSectionAddDof(section, p, 1);
173:             PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
174:             *pBuf = remoteCells[child];
175:           }
176:         }
177:       }
178:     }
179:     /* Add local cells */
180:     adjSize = PETSC_DETERMINE;
181:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
182:     for (a = 0; a < adjSize; ++a) {
183:       const PetscInt point = adj[a];
184:       if (point != p && pStart <= point && point < pEnd) {
185:         PetscInt *PETSC_RESTRICT pBuf;
186:         PetscSectionAddDof(section, p, 1);
187:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
188:         *pBuf = DMPlex_GlobalID(cellNum[point]);
189:       }
190:     }
191:     (*numVertices)++;
192:   }
193:   PetscFree(adj);
194:   PetscFree2(adjCells, remoteCells);
195:   DMSetBasicAdjacency(dm, useCone, useClosure);

197:   /* Derive CSR graph from section/segbuffer */
198:   PetscSectionSetUp(section);
199:   PetscSectionGetStorageSize(section, &size);
200:   PetscMalloc1(*numVertices+1, &vOffsets);
201:   for (idx = 0, p = pStart; p < pEnd; p++) {
202:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
203:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
204:   }
205:   vOffsets[*numVertices] = size;
206:   PetscSegBufferExtractAlloc(adjBuffer, &graph);

208:   if (nroots >= 0) {
209:     /* Filter out duplicate edges using section/segbuffer */
210:     PetscSectionReset(section);
211:     PetscSectionSetChart(section, 0, *numVertices);
212:     for (p = 0; p < *numVertices; p++) {
213:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
214:       PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
215:       PetscSortRemoveDupsInt(&numEdges, &graph[start]);
216:       PetscSectionSetDof(section, p, numEdges);
217:       PetscSegBufferGetInts(adjBuffer, numEdges, &edges);
218:       PetscArraycpy(edges, &graph[start], numEdges);
219:     }
220:     PetscFree(vOffsets);
221:     PetscFree(graph);
222:     /* Derive CSR graph from section/segbuffer */
223:     PetscSectionSetUp(section);
224:     PetscSectionGetStorageSize(section, &size);
225:     PetscMalloc1(*numVertices+1, &vOffsets);
226:     for (idx = 0, p = 0; p < *numVertices; p++) {
227:       PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
228:     }
229:     vOffsets[*numVertices] = size;
230:     PetscSegBufferExtractAlloc(adjBuffer, &graph);
231:   } else {
232:     /* Sort adjacencies (not strictly necessary) */
233:     for (p = 0; p < *numVertices; p++) {
234:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
235:       PetscSortInt(end-start, &graph[start]);
236:     }
237:   }

239:   if (offsets) *offsets = vOffsets;
240:   if (adjacency) *adjacency = graph;

242:   /* Cleanup */
243:   PetscSegBufferDestroy(&adjBuffer);
244:   PetscSectionDestroy(&section);
245:   ISRestoreIndices(cellNumbering, &cellNum);
246:   ISDestroy(&cellNumbering);
247:   return(0);
248: }

250: static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
251: {
252:   Mat            conn, CSR;
253:   IS             fis, cis, cis_own;
254:   PetscSF        sfPoint;
255:   const PetscInt *rows, *cols, *ii, *jj;
256:   PetscInt       *idxs,*idxs2;
257:   PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
258:   PetscMPIInt    rank;
259:   PetscBool      flg;

263:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
264:   DMGetDimension(dm, &dim);
265:   DMPlexGetDepth(dm, &depth);
266:   if (dim != depth) {
267:     /* We do not handle the uninterpolated case here */
268:     DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);
269:     /* DMPlexCreateNeighborCSR does not make a numbering */
270:     if (globalNumbering) {DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);}
271:     /* Different behavior for empty graphs */
272:     if (!*numVertices) {
273:       PetscMalloc1(1, offsets);
274:       (*offsets)[0] = 0;
275:     }
276:     /* Broken in parallel */
277:     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
278:     return(0);
279:   }
280:   /* Interpolated and parallel case */

282:   /* numbering */
283:   DMGetPointSF(dm, &sfPoint);
284:   DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);
285:   DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);
286:   DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);
287:   DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);
288:   if (globalNumbering) {
289:     ISDuplicate(cis, globalNumbering);
290:   }

292:   /* get positive global ids and local sizes for facets and cells */
293:   ISGetLocalSize(fis, &m);
294:   ISGetIndices(fis, &rows);
295:   PetscMalloc1(m, &idxs);
296:   for (i = 0, floc = 0; i < m; i++) {
297:     const PetscInt p = rows[i];

299:     if (p < 0) {
300:       idxs[i] = -(p+1);
301:     } else {
302:       idxs[i] = p;
303:       floc   += 1;
304:     }
305:   }
306:   ISRestoreIndices(fis, &rows);
307:   ISDestroy(&fis);
308:   ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);

310:   ISGetLocalSize(cis, &m);
311:   ISGetIndices(cis, &cols);
312:   PetscMalloc1(m, &idxs);
313:   PetscMalloc1(m, &idxs2);
314:   for (i = 0, cloc = 0; i < m; i++) {
315:     const PetscInt p = cols[i];

317:     if (p < 0) {
318:       idxs[i] = -(p+1);
319:     } else {
320:       idxs[i]       = p;
321:       idxs2[cloc++] = p;
322:     }
323:   }
324:   ISRestoreIndices(cis, &cols);
325:   ISDestroy(&cis);
326:   ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);
327:   ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);

329:   /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
330:   MatCreate(PetscObjectComm((PetscObject)dm), &conn);
331:   MatSetSizes(conn, floc, cloc, M, N);
332:   MatSetType(conn, MATMPIAIJ);
333:   DMPlexGetMaxSizes(dm, NULL, &lm);
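  /* lm is the largest support size on this process; its sum over all ranks is used below as a
     generous upper bound on the nonzeros per facet row of conn, for both the diagonal and
     off-diagonal preallocation */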
334:   MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));
335:   MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);

337:   /* Assemble matrix */
338:   ISGetIndices(fis, &rows);
339:   ISGetIndices(cis, &cols);
340:   for (c = cStart; c < cEnd; c++) {
341:     const PetscInt *cone;
342:     PetscInt        coneSize, row, col, f;

344:     col  = cols[c-cStart];
345:     DMPlexGetCone(dm, c, &cone);
346:     DMPlexGetConeSize(dm, c, &coneSize);
347:     for (f = 0; f < coneSize; f++) {
348:       const PetscScalar v = 1.0;
349:       const PetscInt *children;
350:       PetscInt        numChildren, ch;

352:       row  = rows[cone[f]-fStart];
353:       MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);

355:       /* non-conforming meshes */
356:       DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);
357:       for (ch = 0; ch < numChildren; ch++) {
358:         const PetscInt child = children[ch];

360:         if (child < fStart || child >= fEnd) continue;
361:         row  = rows[child-fStart];
362:         MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);
363:       }
364:     }
365:   }
366:   ISRestoreIndices(fis, &rows);
367:   ISRestoreIndices(cis, &cols);
368:   ISDestroy(&fis);
369:   ISDestroy(&cis);
370:   MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);
371:   MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);

373:   /* Get parallel CSR by doing conn^T * conn */
374:   MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);
375:   MatDestroy(&conn);

377:   /* extract local part of the CSR */
378:   MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);
379:   MatDestroy(&CSR);
380:   MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);
381:   if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

383:   /* get back requested output */
384:   if (numVertices) *numVertices = m;
385:   if (offsets) {
386:     PetscCalloc1(m+1, &idxs);
387:     for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
388:     *offsets = idxs;
389:   }
390:   if (adjacency) {
391:     PetscMalloc1(ii[m] - m, &idxs);
392:     ISGetIndices(cis_own, &rows);
393:     for (i = 0, c = 0; i < m; i++) {
394:       PetscInt j, g = rows[i];

396:       for (j = ii[i]; j < ii[i+1]; j++) {
397:         if (jj[j] == g) continue; /* again, self-connectivity */
398:         idxs[c++] = jj[j];
399:       }
400:     }
401:     if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
402:     ISRestoreIndices(cis_own, &rows);
403:     *adjacency = idxs;
404:   }

406:   /* cleanup */
407:   ISDestroy(&cis_own);
408:   MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);
409:   if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
410:   MatDestroy(&conn);
411:   return(0);
412: }

414: /*@C
415:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

417:   Input Parameters:
418: + dm      - The mesh DM
419: - height  - Height of the strata from which to construct the graph

421:   Output Parameters:
422: + numVertices     - Number of vertices in the graph
423: . offsets         - Point offsets in the graph
424: . adjacency       - Point connectivity in the graph
425: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

427:   Notes: The user can control the definition of adjacency for the mesh using DMSetAdjacency(), choosing the combination appropriate for the function
428:   representation on the mesh. If requested, globalNumbering must be destroyed by the caller; offsets and adjacency must be freed with PetscFree().

430:   Level: developer

432: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
433: @*/
434: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
435: {
437:   PetscBool      usemat = PETSC_FALSE;

440:   PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);
441:   if (usemat) {
442:     DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);
443:   } else {
444:     DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);
445:   }
446:   return(0);
447: }
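
/* Usage sketch (illustrative, not part of PETSc): build the cell adjacency graph of a DMPlex
   at height 0 and compute the maximum vertex degree. The helper name is made up; error
   checking is omitted to match the style of this listing, and the outputs are released as
   described in the manual page above. */
static PetscErrorCode ExampleMaxGraphDegree(DM dm, PetscInt *maxDegree)
{
  PetscInt numVertices, *offsets = NULL, *adjacency = NULL, v;
  IS       globalNumbering;

  *maxDegree = 0;
  DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
  for (v = 0; v < numVertices; ++v) *maxDegree = PetscMax(*maxDegree, offsets[v+1] - offsets[v]);
  PetscFree(offsets);
  PetscFree(adjacency);
  ISDestroy(&globalNumbering);
  return 0;
}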

449: /*@C
450:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

452:   Collective on DM

454:   Input Arguments:
455: + dm - The DMPlex
456: - cellHeight - The height of mesh points to treat as cells (default should be 0)

458:   Output Arguments:
459: + numVertices - The number of local vertices in the graph, or cells in the mesh.
460: . offsets     - The offset to the adjacency list for each cell
461: - adjacency   - The adjacency list for all cells

463:   Note: This is suitable for input to a mesh partitioner like ParMetis.

465:   Level: advanced

467: .seealso: DMPlexCreate()
468: @*/
469: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
470: {
471:   const PetscInt maxFaceCases = 30;
472:   PetscInt       numFaceCases = 0;
473:   PetscInt       numFaceVertices[30]; /* maxFaceCases; fixed size because C89 does not allow using the const variable above as an array bound */
474:   PetscInt      *off, *adj;
475:   PetscInt      *neighborCells = NULL;
476:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

480:   /* For parallel partitioning, I think you have to communicate supports */
481:   DMGetDimension(dm, &dim);
482:   cellDim = dim - cellHeight;
483:   DMPlexGetDepth(dm, &depth);
484:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
485:   if (cEnd - cStart == 0) {
486:     if (numVertices) *numVertices = 0;
487:     if (offsets)   *offsets   = NULL;
488:     if (adjacency) *adjacency = NULL;
489:     return(0);
490:   }
491:   numCells  = cEnd - cStart;
492:   faceDepth = depth - cellHeight;
493:   if (dim == depth) {
494:     PetscInt f, fStart, fEnd;

496:     PetscCalloc1(numCells+1, &off);
497:     /* Count neighboring cells */
498:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
499:     for (f = fStart; f < fEnd; ++f) {
500:       const PetscInt *support;
501:       PetscInt        supportSize;
502:       DMPlexGetSupportSize(dm, f, &supportSize);
503:       DMPlexGetSupport(dm, f, &support);
504:       if (supportSize == 2) {
505:         PetscInt numChildren;

507:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
508:         if (!numChildren) {
509:           ++off[support[0]-cStart+1];
510:           ++off[support[1]-cStart+1];
511:         }
512:       }
513:     }
514:     /* Prefix sum */
515:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
516:     if (adjacency) {
517:       PetscInt *tmp;

519:       PetscMalloc1(off[numCells], &adj);
520:       PetscMalloc1(numCells+1, &tmp);
521:       PetscArraycpy(tmp, off, numCells+1);
522:       /* Get neighboring cells */
523:       for (f = fStart; f < fEnd; ++f) {
524:         const PetscInt *support;
525:         PetscInt        supportSize;
526:         DMPlexGetSupportSize(dm, f, &supportSize);
527:         DMPlexGetSupport(dm, f, &support);
528:         if (supportSize == 2) {
529:           PetscInt numChildren;

531:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
532:           if (!numChildren) {
533:             adj[tmp[support[0]-cStart]++] = support[1];
534:             adj[tmp[support[1]-cStart]++] = support[0];
535:           }
536:         }
537:       }
538:       if (PetscDefined(USE_DEBUG)) {
539:         for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %D != %D for cell %D", tmp[c], off[c+1], c+cStart);
540:       }
541:       PetscFree(tmp);
542:     }
543:     if (numVertices) *numVertices = numCells;
544:     if (offsets)   *offsets   = off;
545:     if (adjacency) *adjacency = adj;
546:     return(0);
547:   }
548:   /* Setup face recognition */
549:   if (faceDepth == 1) {
550:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

552:     for (c = cStart; c < cEnd; ++c) {
553:       PetscInt corners;

555:       DMPlexGetConeSize(dm, c, &corners);
556:       if (!cornersSeen[corners]) {
557:         PetscInt nFV;

559:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
560:         cornersSeen[corners] = 1;

562:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

564:         numFaceVertices[numFaceCases++] = nFV;
565:       }
566:     }
567:   }
568:   PetscCalloc1(numCells+1, &off);
569:   /* Count neighboring cells */
570:   for (cell = cStart; cell < cEnd; ++cell) {
571:     PetscInt numNeighbors = PETSC_DETERMINE, n;

573:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
574:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
575:     for (n = 0; n < numNeighbors; ++n) {
576:       PetscInt        cellPair[2];
577:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
578:       PetscInt        meetSize = 0;
579:       const PetscInt *meet    = NULL;

581:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
582:       if (cellPair[0] == cellPair[1]) continue;
583:       if (!found) {
584:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
585:         if (meetSize) {
586:           PetscInt f;

588:           for (f = 0; f < numFaceCases; ++f) {
589:             if (numFaceVertices[f] == meetSize) {
590:               found = PETSC_TRUE;
591:               break;
592:             }
593:           }
594:         }
595:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
596:       }
597:       if (found) ++off[cell-cStart+1];
598:     }
599:   }
600:   /* Prefix sum */
601:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

603:   if (adjacency) {
604:     PetscMalloc1(off[numCells], &adj);
605:     /* Get neighboring cells */
606:     for (cell = cStart; cell < cEnd; ++cell) {
607:       PetscInt numNeighbors = PETSC_DETERMINE, n;
608:       PetscInt cellOffset   = 0;

610:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
611:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
612:       for (n = 0; n < numNeighbors; ++n) {
613:         PetscInt        cellPair[2];
614:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
615:         PetscInt        meetSize = 0;
616:         const PetscInt *meet    = NULL;

618:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
619:         if (cellPair[0] == cellPair[1]) continue;
620:         if (!found) {
621:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
622:           if (meetSize) {
623:             PetscInt f;

625:             for (f = 0; f < numFaceCases; ++f) {
626:               if (numFaceVertices[f] == meetSize) {
627:                 found = PETSC_TRUE;
628:                 break;
629:               }
630:             }
631:           }
632:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
633:         }
634:         if (found) {
635:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
636:           ++cellOffset;
637:         }
638:       }
639:     }
640:   }
641:   PetscFree(neighborCells);
642:   if (numVertices) *numVertices = numCells;
643:   if (offsets)   *offsets   = off;
644:   if (adjacency) *adjacency = adj;
645:   return(0);
646: }
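
/* Usage sketch (illustrative, not part of PETSc): count the distinct cell-neighbor edges of a
   mesh without storing the adjacency lists, relying on the fact that every output argument of
   DMPlexCreateNeighborCSR() is optional. Error checking is omitted. */
static PetscErrorCode ExampleCountNeighborEdges(DM dm, PetscInt *numEdges)
{
  PetscInt numCells, *offsets = NULL;

  DMPlexCreateNeighborCSR(dm, 0, &numCells, &offsets, NULL);
  *numEdges = numCells ? offsets[numCells]/2 : 0; /* each neighbor pair is stored once per endpoint */
  PetscFree(offsets);
  return 0;
}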

648: /*@C
649:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

651:   Not Collective

653:   Input Parameters:
654: + name        - The name of a new user-defined creation routine
655: - create_func - The creation routine itself

657:   Notes:
658:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

660:   Sample usage:
661: .vb
662:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
663: .ve

665:   Then, your PetscPartitioner type can be chosen with the procedural interface via
666: .vb
667:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
668:     PetscPartitionerSetType(PetscPartitioner, "my_part");
669: .ve
670:    or at runtime via the option
671: .vb
672:     -petscpartitioner_type my_part
673: .ve

675:   Level: advanced

677: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

679: @*/
680: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
681: {

685:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
686:   return(0);
687: }
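
/* Sketch (illustrative) of a creation routine that could be registered with
   PetscPartitionerRegister("my_part", MyPetscPartitionerCreate). The names are hypothetical;
   the structure mirrors PetscPartitionerInitialize_Shell()/PetscPartitionerCreate_Shell()
   later in this file. The trivial callback assigns every local vertex to partition 0. */
static PetscErrorCode MyPartition_Private(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscSectionSetDof(partSection, 0, numVertices);               /* all local vertices in partition 0 */
  ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition); /* points listed in partition order */
  return 0;
}

static PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
{
  part->noGraph        = PETSC_TRUE;  /* this toy partitioner never reads start[]/adjacency[] */
  part->ops->partition = MyPartition_Private;
  return 0;
}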

689: /*@C
690:   PetscPartitionerSetType - Builds a particular PetscPartitioner

692:   Collective on PetscPartitioner

694:   Input Parameters:
695: + part - The PetscPartitioner object
696: - name - The kind of partitioner

698:   Options Database Key:
699: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

701:   Level: intermediate

703: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
704: @*/
705: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
706: {
707:   PetscErrorCode (*r)(PetscPartitioner);
708:   PetscBool      match;

713:   PetscObjectTypeCompare((PetscObject) part, name, &match);
714:   if (match) return(0);

716:   PetscFunctionListFind(PetscPartitionerList, name, &r);
717:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

719:   if (part->ops->destroy) {
720:     (*part->ops->destroy)(part);
721:   }
722:   part->noGraph = PETSC_FALSE;
723:   PetscMemzero(part->ops, sizeof(*part->ops));
724:   PetscObjectChangeTypeName((PetscObject) part, name);
725:   (*r)(part);
726:   return(0);
727: }

729: /*@C
730:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

732:   Not Collective

734:   Input Parameter:
735: . part - The PetscPartitioner

737:   Output Parameter:
738: . name - The PetscPartitioner type name

740:   Level: intermediate

742: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
743: @*/
744: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
745: {
749:   *name = ((PetscObject) part)->type_name;
750:   return(0);
751: }

753: /*@C
754:    PetscPartitionerViewFromOptions - View a PetscPartitioner object based on options in the options database

756:    Collective on PetscPartitioner

758:    Input Parameters:
759: +  A - the PetscPartitioner object
760: .  obj - Optional object
761: -  name - command line option

763:    Level: intermediate
764: .seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
765: @*/
766: PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
767: {

772:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
773:   return(0);
774: }

776: /*@C
777:   PetscPartitionerView - Views a PetscPartitioner

779:   Collective on PetscPartitioner

781:   Input Parameters:
782: + part - the PetscPartitioner object to view
783: - v    - the viewer

785:   Level: developer

787: .seealso: PetscPartitionerDestroy()
788: @*/
789: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
790: {
791:   PetscMPIInt    size;
792:   PetscBool      isascii;

797:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
798:   PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);
799:   if (isascii) {
800:     MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
801:     PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");
802:     PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);
803:     PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);
804:     PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);
805:     PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);
806:   }
807:   if (part->ops->view) {(*part->ops->view)(part, v);}
808:   return(0);
809: }

811: static PetscErrorCode PetscPartitionerGetDefaultType(MPI_Comm comm, const char *currentType, const char **defaultType)
812: {
813:   PetscMPIInt    size;

817:   MPI_Comm_size(comm, &size);
818:   if (!currentType) {
819:     if (size == 1) {
820:       *defaultType = PETSCPARTITIONERSIMPLE;
821:     } else {
822: #if defined(PETSC_HAVE_PARMETIS)
823:       *defaultType = PETSCPARTITIONERPARMETIS;
824: #elif defined(PETSC_HAVE_PTSCOTCH)
825:       *defaultType = PETSCPARTITIONERPTSCOTCH;
826: #elif defined(PETSC_HAVE_CHACO)
827:       *defaultType = PETSCPARTITIONERCHACO;
828: #else
829:       *defaultType = PETSCPARTITIONERSIMPLE;
830: #endif
831:     }
832:   } else {
833:     *defaultType = currentType;
834:   }
835:   return(0);
836: }

838: /*@
839:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

841:   Collective on PetscPartitioner

843:   Input Parameter:
844: . part - the PetscPartitioner object to set options for

846:   Options Database Keys:
847: +  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
848: .  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
849: -  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()

851:   Level: developer

853: .seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
854: @*/
855: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
856: {
857:   const char    *defaultType;
858:   char           name[256];
859:   PetscBool      flg;

864:   PetscPartitionerGetDefaultType(((PetscObject) part)->comm, ((PetscObject) part)->type_name,&defaultType);
865:   PetscObjectOptionsBegin((PetscObject) part);
866:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);
867:   if (flg) {
868:     PetscPartitionerSetType(part, name);
869:   } else if (!((PetscObject) part)->type_name) {
870:     PetscPartitionerSetType(part, defaultType);
871:   }
872:   PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);
873:   if (part->ops->setfromoptions) {
874:     (*part->ops->setfromoptions)(PetscOptionsObject,part);
875:   }
876:   PetscViewerDestroy(&part->viewer);
877:   PetscViewerDestroy(&part->viewerGraph);
878:   PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);
879:   PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);
880:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
881:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
882:   PetscOptionsEnd();
883:   return(0);
884: }

886: /*@C
887:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

889:   Collective on PetscPartitioner

891:   Input Parameter:
892: . part - the PetscPartitioner object to setup

894:   Level: developer

896: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
897: @*/
898: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
899: {

904:   if (part->ops->setup) {(*part->ops->setup)(part);}
905:   return(0);
906: }

908: /*@
909:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

911:   Collective on PetscPartitioner

913:   Input Parameter:
914: . part - the PetscPartitioner object to destroy

916:   Level: developer

918: .seealso: PetscPartitionerView()
919: @*/
920: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
921: {

925:   if (!*part) return(0);

928:   if (--((PetscObject)(*part))->refct > 0) {*part = NULL; return(0);}
929:   ((PetscObject) (*part))->refct = 0;

931:   PetscViewerDestroy(&(*part)->viewer);
932:   PetscViewerDestroy(&(*part)->viewerGraph);
933:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
934:   PetscHeaderDestroy(part);
935:   return(0);
936: }

938: /*@
939:   PetscPartitionerPartition - Partition a graph

941:   Collective on PetscPartitioner

943:   Input Parameters:
944: + part    - The PetscPartitioner
945: . nparts  - Number of partitions
946: . numVertices - Number of vertices in the local part of the graph
947: . start - row pointers for the local part of the graph (CSR style)
948: . adjacency - adjacency list (CSR style)
949: . vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
950: - targetSection - PetscSection describing the absolute weight of each partition (can be NULL)

952:   Output Parameters:
953: + partSection     - The PetscSection giving the division of points by partition
954: - partition       - The list of points by partition

956:   Options Database:
957: + -petscpartitioner_view - View the partitioner information
958: - -petscpartitioner_view_graph - View the graph we are partitioning

960:   Notes:
961:     The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
962:     The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.

964:   Level: developer

966: .seealso: PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
967: @*/
968: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
969: {

975:   if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
976:   if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
977:   if (numVertices && !part->noGraph) {
981:   }
982:   if (vertexSection) {
983:     PetscInt s,e;

986:     PetscSectionGetChart(vertexSection, &s, &e);
987:     if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
988:   }
989:   if (targetSection) {
990:     PetscInt s,e;

993:     PetscSectionGetChart(targetSection, &s, &e);
994:     if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
995:   }

999:   PetscSectionReset(partSection);
1000:   PetscSectionSetChart(partSection, 0, nparts);
1001:   if (nparts == 1) { /* quick */
1002:     PetscSectionSetDof(partSection, 0, numVertices);
1003:     ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);
1004:   } else {
1005:     if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
1006:     (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);
1007:   }
1008:   PetscSectionSetUp(partSection);
1009:   if (part->viewerGraph) {
1010:     PetscViewer viewer = part->viewerGraph;
1011:     PetscBool   isascii;
1012:     PetscInt    v, i;
1013:     PetscMPIInt rank;

1015:     MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);
1016:     PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);
1017:     if (isascii) {
1018:       PetscViewerASCIIPushSynchronized(viewer);
1019:       PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);
1020:       for (v = 0; v < numVertices; ++v) {
1021:         const PetscInt s = start[v];
1022:         const PetscInt e = start[v+1];

1024:         PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);
1025:         for (i = s; i < e; ++i) {PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);}
1026:         PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);
1027:       }
1028:       PetscViewerFlush(viewer);
1029:       PetscViewerASCIIPopSynchronized(viewer);
1030:     }
1031:   }
1032:   if (part->viewer) {
1033:     PetscPartitionerView(part,part->viewer);
1034:   }
1035:   return(0);
1036: }
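
/* Usage sketch (illustrative, not part of PETSc): partition a small, locally held CSR graph
   directly, biasing the result with equal target weights. The 4-vertex path graph and the
   helper name are made up for the example, the partitioner is assumed to live on a single MPI
   process (so local and global vertex numbers coincide), and error checking is omitted. */
static PetscErrorCode ExamplePartitionCSR(PetscPartitioner part, PetscInt nparts)
{
  PetscInt     start[5]     = {0, 1, 3, 5, 6};    /* vertex v has neighbors adjacency[start[v]..start[v+1]-1] */
  PetscInt     adjacency[6] = {1, 0, 2, 1, 3, 2}; /* the path graph 0-1-2-3 */
  PetscSection targetSection, partSection;
  IS           partition;
  PetscInt     p;

  PetscSectionCreate(PETSC_COMM_SELF, &targetSection);
  PetscSectionSetChart(targetSection, 0, nparts);
  for (p = 0; p < nparts; ++p) {PetscSectionSetDof(targetSection, p, 1);} /* equal absolute weights */
  PetscSectionSetUp(targetSection);
  PetscSectionCreate(PETSC_COMM_SELF, &partSection);
  PetscPartitionerPartition(part, nparts, 4, start, adjacency, NULL, targetSection, partSection, &partition);
  /* partSection now gives the number of vertices per partition; partition lists them by partition */
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);
  PetscSectionDestroy(&targetSection);
  return 0;
}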

1038: /*@
1039:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

1041:   Collective

1043:   Input Parameter:
1044: . comm - The communicator for the PetscPartitioner object

1046:   Output Parameter:
1047: . part - The PetscPartitioner object

1049:   Level: beginner

1051: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
1052: @*/
1053: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
1054: {
1055:   PetscPartitioner p;
1056:   const char       *partitionerType = NULL;
1057:   PetscErrorCode   ierr;

1061:   *part = NULL;
1062:   DMInitializePackage();

1064:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
1065:   PetscPartitionerGetDefaultType(comm, NULL, &partitionerType);
1066:   PetscPartitionerSetType(p,partitionerType);

1068:   p->edgeCut = 0;
1069:   p->balance = 0.0;
1070:   p->usevwgt = PETSC_TRUE;

1072:   *part = p;
1073:   return(0);
1074: }
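
/* Usage sketch (illustrative, not part of PETSc): the usual create/configure cycle. Error
   checking is omitted; the caller releases the object with PetscPartitionerDestroy(). */
static PetscErrorCode ExampleCreatePartitioner(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitionerCreate(comm, part);
  PetscPartitionerSetType(*part, PETSCPARTITIONERSIMPLE); /* optional; -petscpartitioner_type can override it below */
  PetscPartitionerSetFromOptions(*part);
  return 0;
}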

1076: /*@
1077:   PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh

1079:   Collective on PetscPartitioner

1081:   Input Parameters:
1082: + part    - The PetscPartitioner
1083: . dm      - The mesh DM
1084: - targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)

1086:   Output Parameters:
1087: + partSection     - The PetscSection giving the division of points by partition
1088: - partition       - The list of points by partition

1090:   Notes:
1091:     If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
1092:     by the section in the transitive closure of the point.

1094:   Level: developer

1096: .seealso: DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
1097: @*/
1098: PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
1099: {
1100:   PetscMPIInt    size;
1101:   PetscBool      isplex;
1103:   PetscSection   vertSection = NULL;

1111:   PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);
1112:   if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
1113:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
1114:   if (size == 1) {
1115:     PetscInt *points;
1116:     PetscInt  cStart, cEnd, c;

1118:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
1119:     PetscSectionReset(partSection);
1120:     PetscSectionSetChart(partSection, 0, size);
1121:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
1122:     PetscSectionSetUp(partSection);
1123:     PetscMalloc1(cEnd-cStart, &points);
1124:     for (c = cStart; c < cEnd; ++c) points[c-cStart] = c;
1125:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
1126:     return(0);
1127:   }
1128:   if (part->height == 0) {
1129:     PetscInt numVertices = 0;
1130:     PetscInt *start     = NULL;
1131:     PetscInt *adjacency = NULL;
1132:     IS       globalNumbering;

1134:     if (!part->noGraph || part->viewGraph) {
1135:       DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);
1136:     } else { /* only compute the number of owned local vertices */
1137:       const PetscInt *idxs;
1138:       PetscInt       p, pStart, pEnd;

1140:       DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);
1141:       DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);
1142:       ISGetIndices(globalNumbering, &idxs);
1143:       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
1144:       ISRestoreIndices(globalNumbering, &idxs);
1145:     }
1146:     if (part->usevwgt) {
1147:       PetscSection   section = dm->localSection, clSection = NULL;
1148:       IS             clPoints = NULL;
1149:       const PetscInt *gid,*clIdx;
1150:       PetscInt       v, p, pStart, pEnd;

1152:       /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
1153:       /* We do this only if the local section has been set */
1154:       if (section) {
1155:         PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);
1156:         if (!clSection) {
1157:           DMPlexCreateClosureIndex(dm,NULL);
1158:         }
1159:         PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);
1160:         ISGetIndices(clPoints,&clIdx);
1161:       }
1162:       DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);
1163:       PetscSectionCreate(PETSC_COMM_SELF, &vertSection);
1164:       PetscSectionSetChart(vertSection, 0, numVertices);
1165:       if (globalNumbering) {
1166:         ISGetIndices(globalNumbering,&gid);
1167:       } else gid = NULL;
1168:       for (p = pStart, v = 0; p < pEnd; ++p) {
1169:         PetscInt dof = 1;

1171:         /* skip cells in the overlap */
1172:         if (gid && gid[p-pStart] < 0) continue;

1174:         if (section) {
1175:           PetscInt cl, clSize, clOff;

1177:           dof  = 0;
1178:           PetscSectionGetDof(clSection, p, &clSize);
1179:           PetscSectionGetOffset(clSection, p, &clOff);
1180:           for (cl = 0; cl < clSize; cl+=2) {
1181:             PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */

1183:             PetscSectionGetDof(section, clPoint, &clDof);
1184:             dof += clDof;
1185:           }
1186:         }
1187:         if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
1188:         PetscSectionSetDof(vertSection, v, dof);
1189:         v++;
1190:       }
1191:       if (globalNumbering) {
1192:         ISRestoreIndices(globalNumbering,&gid);
1193:       }
1194:       if (clPoints) {
1195:         ISRestoreIndices(clPoints,&clIdx);
1196:       }
1197:       PetscSectionSetUp(vertSection);
1198:     }
1199:     PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);
1200:     PetscFree(start);
1201:     PetscFree(adjacency);
1202:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1203:       const PetscInt *globalNum;
1204:       const PetscInt *partIdx;
1205:       PetscInt       *map, cStart, cEnd;
1206:       PetscInt       *adjusted, i, localSize, offset;
1207:       IS             newPartition;

1209:       ISGetLocalSize(*partition,&localSize);
1210:       PetscMalloc1(localSize,&adjusted);
1211:       ISGetIndices(globalNumbering,&globalNum);
1212:       ISGetIndices(*partition,&partIdx);
1213:       PetscMalloc1(localSize,&map);
1214:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
1215:       for (i = cStart, offset = 0; i < cEnd; i++) {
1216:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1217:       }
1218:       for (i = 0; i < localSize; i++) {
1219:         adjusted[i] = map[partIdx[i]];
1220:       }
1221:       PetscFree(map);
1222:       ISRestoreIndices(*partition,&partIdx);
1223:       ISRestoreIndices(globalNumbering,&globalNum);
1224:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
1225:       ISDestroy(&globalNumbering);
1226:       ISDestroy(partition);
1227:       *partition = newPartition;
1228:     }
1229:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1230:   PetscSectionDestroy(&vertSection);
1231:   return(0);
1232: }
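
/* Usage sketch (illustrative, not part of PETSc): partition the cells of a DMPlex mesh with a
   partitioner configured from the options database. The helper name is made up; error checking
   is omitted to match this listing. */
static PetscErrorCode ExamplePartitionMesh(DM dm)
{
  PetscPartitioner part;
  PetscSection     partSection;
  IS               partition;

  PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
  PetscPartitionerSetFromOptions(part);
  PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
  PetscPartitionerDMPlexPartition(part, dm, NULL, partSection, &partition);
  /* partSection holds the number of local cells assigned to each rank;
     partition lists those cells grouped by rank, in local point numbering */
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);
  PetscPartitionerDestroy(&part);
  return 0;
}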

1234: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1235: {
1236:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1237:   PetscErrorCode          ierr;

1240:   PetscSectionDestroy(&p->section);
1241:   ISDestroy(&p->partition);
1242:   PetscFree(p);
1243:   return(0);
1244: }

1246: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1247: {
1248:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1249:   PetscErrorCode          ierr;

1252:   if (p->random) {
1253:     PetscViewerASCIIPushTab(viewer);
1254:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
1255:     PetscViewerASCIIPopTab(viewer);
1256:   }
1257:   return(0);
1258: }

1260: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1261: {
1262:   PetscBool      iascii;

1268:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1269:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
1270:   return(0);
1271: }

1273: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1274: {
1275:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1276:   PetscErrorCode          ierr;

1279:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");
1280:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
1281:   PetscOptionsTail();
1282:   return(0);
1283: }

1285: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1286: {
1287:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1288:   PetscInt                np;
1289:   PetscErrorCode          ierr;

1292:   if (p->random) {
1293:     PetscRandom r;
1294:     PetscInt   *sizes, *points, v, p;
1295:     PetscMPIInt rank;

1297:     MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);
1298:     PetscRandomCreate(PETSC_COMM_SELF, &r);
1299:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
1300:     PetscRandomSetFromOptions(r);
1301:     PetscCalloc2(nparts, &sizes, numVertices, &points);
1302:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1303:     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
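    /* Fisher-Yates shuffle of the identity permutation, so that the contiguous blocks of
       `sizes` points handed to PetscPartitionerShellSetPartition() below form a random partition */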
1304:     for (v = numVertices-1; v > 0; --v) {
1305:       PetscReal val;
1306:       PetscInt  w, tmp;

1308:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
1309:       PetscRandomGetValueReal(r, &val);
1310:       w    = PetscFloorReal(val);
1311:       tmp       = points[v];
1312:       points[v] = points[w];
1313:       points[w] = tmp;
1314:     }
1315:     PetscRandomDestroy(&r);
1316:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
1317:     PetscFree2(sizes, points);
1318:   }
1319:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1320:   PetscSectionGetChart(p->section, NULL, &np);
1321:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
1322:   ISGetLocalSize(p->partition, &np);
1323:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
1324:   PetscSectionCopy(p->section, partSection);
1325:   *partition = p->partition;
1326:   PetscObjectReference((PetscObject) p->partition);
1327:   return(0);
1328: }

1330: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1331: {
1333:   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1334:   part->ops->view           = PetscPartitionerView_Shell;
1335:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1336:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1337:   part->ops->partition      = PetscPartitionerPartition_Shell;
1338:   return(0);
1339: }

1341: /*MC
1342:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

1344:   Level: intermediate

1346:   Options Database Keys:
1347: .  -petscpartitioner_shell_random - Use a random partition

1349: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1350: M*/

1352: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1353: {
1354:   PetscPartitioner_Shell *p;
1355:   PetscErrorCode          ierr;

1359:   PetscNewLog(part, &p);
1360:   part->data = p;

1362:   PetscPartitionerInitialize_Shell(part);
1363:   p->random = PETSC_FALSE;
1364:   return(0);
1365: }

1367: /*@C
1368:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

1370:   Collective on PetscPartitioner

1372:   Input Parameters:
1373: + part   - The PetscPartitioner
1374: . size   - The number of partitions
1375: . sizes  - array of length size (or NULL) providing the number of points in each partition
1376: - points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

1378:   Level: developer

1380:   Notes:
1381:     It is safe to free the sizes and points arrays after use in this routine.

1383: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
1384: @*/
1385: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1386: {
1387:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1388:   PetscInt                proc, numPoints;
1389:   PetscErrorCode          ierr;

1395:   PetscSectionDestroy(&p->section);
1396:   ISDestroy(&p->partition);
1397:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
1398:   PetscSectionSetChart(p->section, 0, size);
1399:   if (sizes) {
1400:     for (proc = 0; proc < size; ++proc) {
1401:       PetscSectionSetDof(p->section, proc, sizes[proc]);
1402:     }
1403:   }
1404:   PetscSectionSetUp(p->section);
1405:   PetscSectionGetStorageSize(p->section, &numPoints);
1406:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
1407:   return(0);
1408: }
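
/* Usage sketch (illustrative, not part of PETSc): prescribe an explicit two-way partition of six
   points through a partitioner of type PETSCPARTITIONERSHELL. The sizes/points arrays are made up
   for the example and, per the note above, may live on the stack since they are copied. */
static PetscErrorCode ExampleShellPartition(PetscPartitioner part)
{
  const PetscInt sizes[2]  = {4, 2};             /* 4 points in partition 0, 2 in partition 1 */
  const PetscInt points[6] = {0, 2, 4, 5, 1, 3}; /* partition 0 first, then partition 1 */

  PetscPartitionerShellSetPartition(part, 2, sizes, points);
  return 0;
}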

1410: /*@
1411:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

1413:   Collective on PetscPartitioner

1415:   Input Parameters:
1416: + part   - The PetscPartitioner
1417: - random - The flag to use a random partition

1419:   Level: intermediate

1421: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1422: @*/
1423: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1424: {
1425:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1429:   p->random = random;
1430:   return(0);
1431: }

1433: /*@
1434:   PetscPartitionerShellGetRandom - get the flag to use a random partition

1436:   Collective on PetscPartitioner

1438:   Input Parameter:
1439: . part   - The PetscPartitioner

1441:   Output Parameter:
1442: . random - The flag to use a random partition

1444:   Level: intermediate

1446: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1447: @*/
1448: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1449: {
1450:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1455:   *random = p->random;
1456:   return(0);
1457: }

1459: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1460: {
1461:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1462:   PetscErrorCode          ierr;

1465:   PetscFree(p);
1466:   return(0);
1467: }

1469: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
1470: {
1472:   return(0);
1473: }

1475: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1476: {
1477:   PetscBool      iascii;

1483:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1484:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
1485:   return(0);
1486: }

1488: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1489: {
1490:   MPI_Comm       comm;
1491:   PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
1492:   PetscMPIInt    size;

1496:   if (vertSection) { PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n"); }
1497:   comm = PetscObjectComm((PetscObject)part);
1498:   MPI_Comm_size(comm,&size);
1499:   if (targetSection) {
1500:     MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);
1501:     PetscCalloc1(nparts,&tpwgts);
1502:     for (np = 0; np < nparts; ++np) {
1503:       PetscSectionGetDof(targetSection,np,&tpwgts[np]);
1504:       sumw += tpwgts[np];
1505:     }
1506:     if (!sumw) {
1507:       PetscFree(tpwgts);
1508:     } else {
1509:       PetscInt m,mp;
1510:       for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
1511:       for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
1512:         if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
1513:         sumw += tpwgts[np];
1514:       }
1515:       if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
1516:     }
1517:   }

1519:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1520:   if (size == 1) {
1521:     if (tpwgts) {
1522:       for (np = 0; np < nparts; ++np) {
1523:         PetscSectionSetDof(partSection, np, tpwgts[np]);
1524:       }
1525:     } else {
1526:       for (np = 0; np < nparts; ++np) {
1527:         PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));
1528:       }
1529:     }
1530:   } else {
1531:     if (tpwgts) {
1532:       Vec         v;
1533:       PetscScalar *array;
1534:       PetscInt    st,j;
1535:       PetscMPIInt rank;

1537:       VecCreate(comm,&v);
1538:       VecSetSizes(v,numVertices,numVerticesGlobal);
1539:       VecSetType(v,VECSTANDARD);
1540:       MPI_Comm_rank(comm,&rank);
1541:       for (np = 0,st = 0; np < nparts; ++np) {
1542:         if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
1543:           for (j = 0; j < tpwgts[np]; j++) {
1544:             VecSetValue(v,st+j,np,INSERT_VALUES);
1545:           }
1546:         }
1547:         st += tpwgts[np];
1548:       }
1549:       VecAssemblyBegin(v);
1550:       VecAssemblyEnd(v);
1551:       VecGetArray(v,&array);
1552:       for (j = 0; j < numVertices; ++j) {
1553:         PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);
1554:       }
1555:       VecRestoreArray(v,&array);
1556:       VecDestroy(&v);
1557:     } else {
1558:       PetscMPIInt rank;
1559:       PetscInt nvGlobal, *offsets, myFirst, myLast;

1561:       PetscMalloc1(size+1,&offsets);
1562:       offsets[0] = 0;
1563:       MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
1564:       for (np = 2; np <= size; np++) {
1565:         offsets[np] += offsets[np-1];
1566:       }
1567:       nvGlobal = offsets[size];
1568:       MPI_Comm_rank(comm,&rank);
1569:       myFirst = offsets[rank];
1570:       myLast  = offsets[rank + 1] - 1;
1571:       PetscFree(offsets);
1572:       if (numVertices) {
1573:         PetscInt firstPart = 0, firstLargePart = 0;
1574:         PetscInt lastPart = 0, lastLargePart = 0;
1575:         PetscInt rem = nvGlobal % nparts;
1576:         PetscInt pSmall = nvGlobal/nparts;
1577:         PetscInt pBig = nvGlobal/nparts + 1;

1579:         if (rem) {
1580:           firstLargePart = myFirst / pBig;
1581:           lastLargePart  = myLast  / pBig;

1583:           if (firstLargePart < rem) {
1584:             firstPart = firstLargePart;
1585:           } else {
1586:             firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1587:           }
1588:           if (lastLargePart < rem) {
1589:             lastPart = lastLargePart;
1590:           } else {
1591:             lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1592:           }
1593:         } else {
1594:           firstPart = myFirst / (nvGlobal/nparts);
1595:           lastPart  = myLast  / (nvGlobal/nparts);
1596:         }

1598:         for (np = firstPart; np <= lastPart; np++) {
1599:           PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1600:           PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1602:           PartStart = PetscMax(PartStart,myFirst);
1603:           PartEnd   = PetscMin(PartEnd,myLast+1);
1604:           PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1605:         }
1606:       }
1607:     }
1608:   }
1609:   PetscFree(tpwgts);
1610:   return(0);
1611: }
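
/*
  Worked example of the block sizing above (illustrative numbers): with nvGlobal = 10 and
  nparts = 4, rem = 2, pBig = 3, pSmall = 2, so the target partition sizes are {3, 3, 2, 2}.
  A rank owning global vertices 4..6 (myFirst = 4, myLast = 6) computes firstPart = 1 and
  lastPart = 2, and records dofs of 2 and 1 for partitions 1 and 2 in partSection.
*/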

1613: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1614: {
1616:   part->noGraph        = PETSC_TRUE;
1617:   part->ops->view      = PetscPartitionerView_Simple;
1618:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1619:   part->ops->partition = PetscPartitionerPartition_Simple;
1620:   return(0);
1621: }

1623: /*MC
1624:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1626:   Level: intermediate

1628: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1629: M*/
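
/*
  Selection sketch (illustrative): the simple partitioner can be chosen programmatically,

    DMPlexGetPartitioner(dm, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);

  or from the options database with -petscpartitioner_type simple.
*/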

1631: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1632: {
1633:   PetscPartitioner_Simple *p;
1634:   PetscErrorCode           ierr;

1638:   PetscNewLog(part, &p);
1639:   part->data = p;

1641:   PetscPartitionerInitialize_Simple(part);
1642:   return(0);
1643: }

1645: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1646: {
1647:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1648:   PetscErrorCode          ierr;

1651:   PetscFree(p);
1652:   return(0);
1653: }

1655: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1656: {
1658:   return(0);
1659: }

1661: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1662: {
1663:   PetscBool      iascii;

1669:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1670:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1671:   return(0);
1672: }

1674: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1675: {
1676:   PetscInt       np;

1680:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1681:   PetscSectionSetDof(partSection,0,numVertices);
1682:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1683:   return(0);
1684: }

1686: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1687: {
1689:   part->noGraph        = PETSC_TRUE;
1690:   part->ops->view      = PetscPartitionerView_Gather;
1691:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1692:   part->ops->partition = PetscPartitionerPartition_Gather;
1693:   return(0);
1694: }

1696: /*MC
1697:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1699:   Level: intermediate

1701: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1702: M*/

1704: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1705: {
1706:   PetscPartitioner_Gather *p;
1707:   PetscErrorCode           ierr;

1711:   PetscNewLog(part, &p);
1712:   part->data = p;

1714:   PetscPartitionerInitialize_Gather(part);
1715:   return(0);
1716: }


1719: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1720: {
1721:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1722:   PetscErrorCode          ierr;

1725:   PetscFree(p);
1726:   return(0);
1727: }

1729: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1730: {
1732:   return(0);
1733: }

1735: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1736: {
1737:   PetscBool      iascii;

1743:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1744:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1745:   return(0);
1746: }

1748: #if defined(PETSC_HAVE_CHACO)
1749: #if defined(PETSC_HAVE_UNISTD_H)
1750: #include <unistd.h>
1751: #endif
1752: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1753: #include <chaco.h>
1754: #else
1755: /* Older versions of Chaco do not have an include file */
1756: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1757:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1758:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1759:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1760:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1761: #endif
1762: extern int FREE_GRAPH;
1763: #endif

1765: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1766: {
1767: #if defined(PETSC_HAVE_CHACO)
1768:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1769:   MPI_Comm       comm;
1770:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1771:   int           *vwgts          = NULL;   /* weights for all vertices */
1772:   float         *ewgts          = NULL;   /* weights for all edges */
1773:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1774:   char          *outassignname  = NULL;   /*  name of assignment output file */
1775:   char          *outfilename    = NULL;   /* output file name */
1776:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1777:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1778:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1779:   double        *goal          = NULL;    /* desired set sizes for each set */
1780:   int            global_method = 1;       /* global partitioning algorithm */
1781:   int            local_method  = 1;       /* local partitioning algorithm */
1782:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1783:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1784:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1785:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1786:   long           seed          = 123636512; /* for random graph mutations */
1787: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1788:   int           *assignment;              /* Output partition */
1789: #else
1790:   short int     *assignment;              /* Output partition */
1791: #endif
1792:   int            fd_stdout, fd_pipe[2];
1793:   PetscInt      *points;
1794:   int            i, v, p;

1798:   PetscObjectGetComm((PetscObject)part,&comm);
1799:   if (PetscDefined(USE_DEBUG)) {
1800:     int ival,isum;
1801:     PetscBool distributed;

1803:     ival = (numVertices > 0);
1804:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1805:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1806:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1807:   }
1808:   if (!numVertices) { /* distributed case, return if not holding the graph */
1809:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1810:     return(0);
1811:   }
1812:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1813:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

1815:   if (global_method == INERTIAL_METHOD) {
1816:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1817:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1818:   }
1819:   mesh_dims[0] = nparts;
1820:   mesh_dims[1] = 1;
1821:   mesh_dims[2] = 1;
1822:   PetscMalloc1(nvtxs, &assignment);
1823:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1824:   /* TODO: check error codes for UNIX calls */
1825: #if defined(PETSC_HAVE_UNISTD_H)
1826:   {
1827:     int piperet;
1828:     piperet = pipe(fd_pipe);
1829:     if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
1830:     fd_stdout = dup(1);
1831:     close(1);
1832:     dup2(fd_pipe[1], 1);
1833:   }
1834: #endif
1835:   if (part->usevwgt) { PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n"); }
1836:   interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1837:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1838:                    vmax, ndims, eigtol, seed);
1839: #if defined(PETSC_HAVE_UNISTD_H)
1840:   {
1841:     char msgLog[10000];
1842:     int  count;

1844:     fflush(stdout);
1845:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1846:     if (count < 0) count = 0;
1847:     msgLog[count] = 0;
1848:     close(1);
1849:     dup2(fd_stdout, 1);
1850:     close(fd_stdout);
1851:     close(fd_pipe[0]);
1852:     close(fd_pipe[1]);
1853:     if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1854:   }
1855: #else
1856:   if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1857: #endif
1858:   /* Convert to PetscSection+IS */
1859:   for (v = 0; v < nvtxs; ++v) {
1860:     PetscSectionAddDof(partSection, assignment[v], 1);
1861:   }
1862:   PetscMalloc1(nvtxs, &points);
1863:   for (p = 0, i = 0; p < nparts; ++p) {
1864:     for (v = 0; v < nvtxs; ++v) {
1865:       if (assignment[v] == p) points[i++] = v;
1866:     }
1867:   }
1868:   if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1869:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1870:   if (global_method == INERTIAL_METHOD) {
1871:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1872:   }
1873:   PetscFree(assignment);
1874:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1875:   return(0);
1876: #else
1877:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1878: #endif
1879: }

1881: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1882: {
1884:   part->noGraph        = PETSC_FALSE;
1885:   part->ops->view      = PetscPartitionerView_Chaco;
1886:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1887:   part->ops->partition = PetscPartitionerPartition_Chaco;
1888:   return(0);
1889: }

1891: /*MC
1892:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1894:   Level: intermediate

1896: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1897: M*/

1899: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1900: {
1901:   PetscPartitioner_Chaco *p;
1902:   PetscErrorCode          ierr;

1906:   PetscNewLog(part, &p);
1907:   part->data = p;

1909:   PetscPartitionerInitialize_Chaco(part);
1910:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1911:   return(0);
1912: }

1914: static const char *ptypes[] = {"kway", "rb"};

1916: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1917: {
1918:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1919:   PetscErrorCode             ierr;

1922:   MPI_Comm_free(&p->pcomm);
1923:   PetscFree(p);
1924:   return(0);
1925: }

1927: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1928: {
1929:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1930:   PetscErrorCode             ierr;

1933:   PetscViewerASCIIPushTab(viewer);
1934:   PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);
1935:   PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);
1936:   PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);
1937:   PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);
1938:   PetscViewerASCIIPopTab(viewer);
1939:   return(0);
1940: }

1942: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1943: {
1944:   PetscBool      iascii;

1950:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1951:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1952:   return(0);
1953: }

1955: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1956: {
1957:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1958:   PetscErrorCode            ierr;

1961:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
1962:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
1963:   PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);
1964:   PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);
1965:   PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);
1966:   PetscOptionsTail();
1967:   return(0);
1968: }

1970: #if defined(PETSC_HAVE_PARMETIS)
1971: #include <parmetis.h>
1972: #endif

1974: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1975: {
1976: #if defined(PETSC_HAVE_PARMETIS)
1977:   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1978:   MPI_Comm       comm;
1979:   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1980:   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1981:   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1982:   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1983:   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1984:   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1985:   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1986:   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1987:   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1988:   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1989:   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1990:   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1991:   PetscInt       options[64];               /* Options */
1992:   PetscInt       v, i, *assignment, *points;
1993:   PetscMPIInt    p, size, rank;
1994:   PetscBool      hasempty = PETSC_FALSE;

1998:   PetscObjectGetComm((PetscObject) part, &comm);
1999:   MPI_Comm_size(comm, &size);
2000:   MPI_Comm_rank(comm, &rank);
2001:   /* Calculate vertex distribution */
2002:   PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);
2003:   vtxdist[0] = 0;
2004:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
2005:   for (p = 2; p <= size; ++p) {
2006:     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2007:     vtxdist[p] += vtxdist[p-1];
2008:   }
2009:   /* null graph */
2010:   if (vtxdist[size] == 0) {
2011:     PetscFree4(vtxdist,tpwgts,ubvec,assignment);
2012:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
2013:     return(0);
2014:   }
2015:   /* Calculate partition weights */
2016:   if (targetSection) {
2017:     PetscInt p;
2018:     real_t   sumt = 0.0;

2020:     for (p = 0; p < nparts; ++p) {
2021:       PetscInt tpd;

2023:       PetscSectionGetDof(targetSection,p,&tpd);
2024:       sumt += tpd;
2025:       tpwgts[p] = tpd;
2026:     }
2027:     if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
2028:       for (p = 0, sumt = 0.0; p < nparts; ++p) {
2029:         tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
2030:         sumt += tpwgts[p];
2031:       }
2032:       for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
2033:       for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
2034:       tpwgts[nparts - 1] = 1. - sumt;
2035:     }
2036:   } else {
2037:     for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
2038:   }
2039:   ubvec[0] = pm->imbalanceRatio;

2041:   /* Weight cells */
2042:   if (vertSection) {
2043:     PetscMalloc1(nvtxs,&vwgt);
2044:     for (v = 0; v < nvtxs; ++v) {
2045:       PetscSectionGetDof(vertSection, v, &vwgt[v]);
2046:     }
2047:     wgtflag |= 2; /* have weights on graph vertices */
2048:   }

2050:   for (p = 0; !vtxdist[p+1] && p < size; ++p);
2051:   if (vtxdist[p+1] == vtxdist[size]) {
2052:     if (rank == p) {
2053:       METIS_SetDefaultOptions(options); /* initialize all defaults */
2054:       options[METIS_OPTION_DBGLVL] = pm->debugFlag;
2055:       options[METIS_OPTION_SEED]   = pm->randomSeed;
2056:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2057:       if (metis_ptype == 1) {
2058:         PetscStackPush("METIS_PartGraphRecursive");
2059:         METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2060:         PetscStackPop;
2061:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
2062:       } else {
2063:         /*
2064:          It would be nice to activate the two options below, but they would need some actual testing.
2065:          - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
2066:          - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
2067:         */
2068:         /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
2069:         /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
2070:         PetscStackPush("METIS_PartGraphKway");
2071:         METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2072:         PetscStackPop;
2073:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2074:       }
2075:     }
2076:   } else {
2077:     MPI_Comm pcomm;

2079:     options[0] = 1; /*use options */
2080:     options[1] = pm->debugFlag;
2081:     options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

2083:     if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
2084:       PetscInt cnt;

2086:       MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);
2087:       for (p=0,cnt=0;p<size;p++) {
2088:         if (vtxdist[p+1] != vtxdist[p]) {
2089:           vtxdist[cnt+1] = vtxdist[p+1];
2090:           cnt++;
2091:         }
2092:       }
2093:     } else pcomm = pm->pcomm;
2094:     if (nvtxs) {
2095:       PetscStackPush("ParMETIS_V3_PartKway");
2096:       ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
2097:       PetscStackPop;
2098:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
2099:     }
2100:     if (hasempty) {
2101:       MPI_Comm_free(&pcomm);
2102:     }
2103:   }

2105:   /* Convert to PetscSection+IS */
2106:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
2107:   PetscMalloc1(nvtxs, &points);
2108:   for (p = 0, i = 0; p < nparts; ++p) {
2109:     for (v = 0; v < nvtxs; ++v) {
2110:       if (assignment[v] == p) points[i++] = v;
2111:     }
2112:   }
2113:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2114:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
2115:   PetscFree4(vtxdist,tpwgts,ubvec,assignment);
2116:   PetscFree(vwgt);
2117:   return(0);
2118: #else
2119:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
2120: #endif
2121: }

2123: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
2124: {
2126:   part->noGraph             = PETSC_FALSE;
2127:   part->ops->view           = PetscPartitionerView_ParMetis;
2128:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
2129:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
2130:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
2131:   return(0);
2132: }

2134: /*MC
2135:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

2137:   Level: intermediate

2139:   Options Database Keys:
2140: +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2141: .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2142: .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2143: -  -petscpartitioner_parmetis_seed <int> - Random seed

2145:   Notes: When the graph is on a single process, this partitioner actually calls METIS and not ParMETIS

2147: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2148: M*/
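
/*
  Options sketch (illustrative values): the keys above may be set programmatically before
  configuring the partitioner, for example

    PetscOptionsSetValue(NULL, "-petscpartitioner_type", "parmetis");
    PetscOptionsSetValue(NULL, "-petscpartitioner_parmetis_type", "rb");
    PetscOptionsSetValue(NULL, "-petscpartitioner_parmetis_imbalance_ratio", "1.1");
    DMPlexGetPartitioner(dm, &part);
    PetscPartitionerSetFromOptions(part);
*/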

2150: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2151: {
2152:   PetscPartitioner_ParMetis *p;
2153:   PetscErrorCode          ierr;

2157:   PetscNewLog(part, &p);
2158:   part->data = p;

2160:   MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);
2161:   p->ptype          = 0;
2162:   p->imbalanceRatio = 1.05;
2163:   p->debugFlag      = 0;
2164:   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

2166:   PetscPartitionerInitialize_ParMetis(part);
2167:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
2168:   return(0);
2169: }

2171: #if defined(PETSC_HAVE_PTSCOTCH)

2173: EXTERN_C_BEGIN
2174: #include <ptscotch.h>
2175: EXTERN_C_END

2177: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

2179: static int PTScotch_Strategy(PetscInt strategy)
2180: {
2181:   switch (strategy) {
2182:   case  0: return SCOTCH_STRATDEFAULT;
2183:   case  1: return SCOTCH_STRATQUALITY;
2184:   case  2: return SCOTCH_STRATSPEED;
2185:   case  3: return SCOTCH_STRATBALANCE;
2186:   case  4: return SCOTCH_STRATSAFETY;
2187:   case  5: return SCOTCH_STRATSCALABILITY;
2188:   case  6: return SCOTCH_STRATRECURSIVE;
2189:   case  7: return SCOTCH_STRATREMAP;
2190:   default: return SCOTCH_STRATDEFAULT;
2191:   }
2192: }

2194: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2195:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
2196: {
2197:   SCOTCH_Graph   grafdat;
2198:   SCOTCH_Strat   stradat;
2199:   SCOTCH_Num     vertnbr = n;
2200:   SCOTCH_Num     edgenbr = xadj[n];
2201:   SCOTCH_Num*    velotab = vtxwgt;
2202:   SCOTCH_Num*    edlotab = adjwgt;
2203:   SCOTCH_Num     flagval = strategy;
2204:   double         kbalval = imbalance;

2208:   {
2209:     PetscBool flg = PETSC_TRUE;
2210:     PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");
2211:     PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
2212:     if (!flg) velotab = NULL;
2213:   }
2214:   SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
2215:   SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
2216:   SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2217:   SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
2218:   if (tpart) {
2219:     SCOTCH_Arch archdat;
2220:     SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2221:     SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2222:     SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
2223:     SCOTCH_archExit(&archdat);
2224:   } else {
2225:     SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
2226:   }
2227:   SCOTCH_stratExit(&stradat);
2228:   SCOTCH_graphExit(&grafdat);
2229:   return(0);
2230: }

2232: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2233:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2234: {
2235:   PetscMPIInt     procglbnbr;
2236:   PetscMPIInt     proclocnum;
2237:   SCOTCH_Arch     archdat;
2238:   SCOTCH_Dgraph   grafdat;
2239:   SCOTCH_Dmapping mappdat;
2240:   SCOTCH_Strat    stradat;
2241:   SCOTCH_Num      vertlocnbr;
2242:   SCOTCH_Num      edgelocnbr;
2243:   SCOTCH_Num*     veloloctab = vtxwgt;
2244:   SCOTCH_Num*     edloloctab = adjwgt;
2245:   SCOTCH_Num      flagval = strategy;
2246:   double          kbalval = imbalance;
2247:   PetscErrorCode  ierr;

2250:   {
2251:     PetscBool flg = PETSC_TRUE;
2252:     PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");
2253:     PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
2254:     if (!flg) veloloctab = NULL;
2255:   }
2256:   MPI_Comm_size(comm, &procglbnbr);
2257:   MPI_Comm_rank(comm, &proclocnum);
2258:   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2259:   edgelocnbr = xadj[vertlocnbr];

2261:   SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2262:   SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2263:   SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2264:   SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);
2265:   SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2266:   if (tpart) { /* target partition weights */
2267:     SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2268:   } else {
2269:     SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2270:   }
2271:   SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);

2273:   SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2274:   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2275:   SCOTCH_archExit(&archdat);
2276:   SCOTCH_stratExit(&stradat);
2277:   SCOTCH_dgraphExit(&grafdat);
2278:   return(0);
2279: }

2281: #endif /* PETSC_HAVE_PTSCOTCH */

2283: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2284: {
2285:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2286:   PetscErrorCode             ierr;

2289:   MPI_Comm_free(&p->pcomm);
2290:   PetscFree(p);
2291:   return(0);
2292: }

2294: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2295: {
2296:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2297:   PetscErrorCode            ierr;

2300:   PetscViewerASCIIPushTab(viewer);
2301:   PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
2302:   PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);
2303:   PetscViewerASCIIPopTab(viewer);
2304:   return(0);
2305: }

2307: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2308: {
2309:   PetscBool      iascii;

2315:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
2316:   if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
2317:   return(0);
2318: }

2320: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2321: {
2322:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2323:   const char *const         *slist = PTScotchStrategyList;
2324:   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2325:   PetscBool                 flag;
2326:   PetscErrorCode            ierr;

2329:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
2330:   PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
2331:   PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
2332:   PetscOptionsTail();
2333:   return(0);
2334: }

2336: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2337: {
2338: #if defined(PETSC_HAVE_PTSCOTCH)
2339:   MPI_Comm       comm;
2340:   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2341:   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2342:   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2343:   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2344:   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2345:   PetscInt       *adjwgt = NULL;        /* Edge weights */
2346:   PetscInt       v, i, *assignment, *points;
2347:   PetscMPIInt    size, rank, p;
2348:   PetscBool      hasempty = PETSC_FALSE;
2349:   PetscInt       *tpwgts = NULL;

2353:   PetscObjectGetComm((PetscObject)part,&comm);
2354:   MPI_Comm_size(comm, &size);
2355:   MPI_Comm_rank(comm, &rank);
2356:   PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);
2357:   /* Calculate vertex distribution */
2358:   vtxdist[0] = 0;
2359:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
2360:   for (p = 2; p <= size; ++p) {
2361:     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2362:     vtxdist[p] += vtxdist[p-1];
2363:   }
2364:   /* null graph */
2365:   if (vtxdist[size] == 0) {
2366:     PetscFree2(vtxdist, assignment);
2367:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
2368:     return(0);
2369:   }

2371:   /* Calculate vertex weights */
2372:   if (vertSection) {
2373:     PetscMalloc1(nvtxs,&vwgt);
2374:     for (v = 0; v < nvtxs; ++v) {
2375:       PetscSectionGetDof(vertSection, v, &vwgt[v]);
2376:     }
2377:   }

2379:   /* Calculate partition weights */
2380:   if (targetSection) {
2381:     PetscInt sumw;

2383:     PetscCalloc1(nparts,&tpwgts);
2384:     for (p = 0, sumw = 0; p < nparts; ++p) {
2385:       PetscSectionGetDof(targetSection,p,&tpwgts[p]);
2386:       sumw += tpwgts[p];
2387:     }
2388:     if (!sumw) {
2389:       PetscFree(tpwgts);
2390:     }
2391:   }

2393:   {
2394:     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2395:     int                       strat = PTScotch_Strategy(pts->strategy);
2396:     double                    imbal = (double)pts->imbalance;

2398:     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2399:     if (vtxdist[p+1] == vtxdist[size]) {
2400:       if (rank == p) {
2401:         PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);
2402:       }
2403:     } else {
2404:       PetscInt cnt;
2405:       MPI_Comm pcomm;

2407:       if (hasempty) {
2408:         MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);
2409:         for (p=0,cnt=0;p<size;p++) {
2410:           if (vtxdist[p+1] != vtxdist[p]) {
2411:             vtxdist[cnt+1] = vtxdist[p+1];
2412:             cnt++;
2413:           }
2414:         }
2415:       } else pcomm = pts->pcomm;
2416:       if (nvtxs) {
2417:         PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);
2418:       }
2419:       if (hasempty) {
2420:         MPI_Comm_free(&pcomm);
2421:       }
2422:     }
2423:   }
2424:   PetscFree(vwgt);
2425:   PetscFree(tpwgts);

2427:   /* Convert to PetscSection+IS */
2428:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
2429:   PetscMalloc1(nvtxs, &points);
2430:   for (p = 0, i = 0; p < nparts; ++p) {
2431:     for (v = 0; v < nvtxs; ++v) {
2432:       if (assignment[v] == p) points[i++] = v;
2433:     }
2434:   }
2435:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2436:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

2438:   PetscFree2(vtxdist,assignment);
2439:   return(0);
2440: #else
2441:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2442: #endif
2443: }

2445: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2446: {
2448:   part->noGraph             = PETSC_FALSE;
2449:   part->ops->view           = PetscPartitionerView_PTScotch;
2450:   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2451:   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2452:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2453:   return(0);
2454: }

2456: /*MC
2457:   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

2459:   Level: intermediate

2461:   Options Database Keys:
2462: +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of: default, quality, speed, balance, safety, scalability, recursive, remap
2463: -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio

2465:   Notes: When the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch

2467: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2468: M*/
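
/*
  Options sketch (illustrative values): example command-line selection using the keys
  above:

    -petscpartitioner_type ptscotch -petscpartitioner_ptscotch_strategy quality -petscpartitioner_ptscotch_imbalance 0.05
*/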

2470: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2471: {
2472:   PetscPartitioner_PTScotch *p;
2473:   PetscErrorCode          ierr;

2477:   PetscNewLog(part, &p);
2478:   part->data = p;

2480:   MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);
2481:   p->strategy  = 0;
2482:   p->imbalance = 0.01;

2484:   PetscPartitionerInitialize_PTScotch(part);
2485:   PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
2486:   return(0);
2487: }

2489: /*@
2490:   DMPlexGetPartitioner - Get the mesh partitioner

2492:   Not collective

2494:   Input Parameter:
2495: . dm - The DM

2497:   Output Parameter:
2498: . part - The PetscPartitioner

2500:   Level: developer

2502:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

2504: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2505: @*/
2506: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2507: {
2508:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2513:   *part = mesh->partitioner;
2514:   return(0);
2515: }
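
/*
  Usage sketch (illustrative): configure the partitioner held by the DM and distribute.
  Since the reference is borrowed, the partitioner is not destroyed here.  The routine
  name is hypothetical.
*/
static PetscErrorCode UserConfigureAndDistribute_Sketch(DM dm, DM *dmDist)
{
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetFromOptions(part);
  DMPlexDistribute(dm, 0, NULL, dmDist);
  return(0);
}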

2517: /*@
2518:   DMPlexSetPartitioner - Set the mesh partitioner

2520:   Logically collective on DM

2522:   Input Parameters:
2523: + dm - The DM
2524: - part - The partitioner

2526:   Level: developer

2528:   Note: Any existing PetscPartitioner will be destroyed.

2530: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2531: @*/
2532: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2533: {
2534:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2540:   PetscObjectReference((PetscObject)part);
2541:   PetscPartitionerDestroy(&mesh->partitioner);
2542:   mesh->partitioner = part;
2543:   return(0);
2544: }
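
/*
  Usage sketch (illustrative): install a user-created partitioner.  The DM takes its own
  reference, so the local reference is released afterwards.  The routine name is
  hypothetical.
*/
static PetscErrorCode UserInstallPartitioner_Sketch(DM dm)
{
  PetscPartitioner part;

  PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERGATHER);
  DMPlexSetPartitioner(dm, part);
  PetscPartitionerDestroy(&part);
  return(0);
}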

2546: static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2547: {
2548:   const PetscInt *cone;
2549:   PetscInt       coneSize, c;
2550:   PetscBool      missing;

2554:   PetscHSetIQueryAdd(ht, point, &missing);
2555:   if (missing) {
2556:     DMPlexGetCone(dm, point, &cone);
2557:     DMPlexGetConeSize(dm, point, &coneSize);
2558:     for (c = 0; c < coneSize; c++) {
2559:       DMPlexAddClosure_Private(dm, ht, cone[c]);
2560:     }
2561:   }
2562:   return(0);
2563: }

2565: PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2566: {

2570:   if (up) {
2571:     PetscInt parent;

2573:     DMPlexGetTreeParent(dm,point,&parent,NULL);
2574:     if (parent != point) {
2575:       PetscInt closureSize, *closure = NULL, i;

2577:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2578:       for (i = 0; i < closureSize; i++) {
2579:         PetscInt cpoint = closure[2*i];

2581:         PetscHSetIAdd(ht, cpoint);
2582:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);
2583:       }
2584:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2585:     }
2586:   }
2587:   if (down) {
2588:     PetscInt numChildren;
2589:     const PetscInt *children;

2591:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
2592:     if (numChildren) {
2593:       PetscInt i;

2595:       for (i = 0; i < numChildren; i++) {
2596:         PetscInt cpoint = children[i];

2598:         PetscHSetIAdd(ht, cpoint);
2599:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);
2600:       }
2601:     }
2602:   }
2603:   return(0);
2604: }

2606: static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2607: {
2608:   PetscInt       parent;

2612:   DMPlexGetTreeParent(dm, point, &parent,NULL);
2613:   if (point != parent) {
2614:     const PetscInt *cone;
2615:     PetscInt       coneSize, c;

2617:     DMPlexAddClosureTree_Up_Private(dm, ht, parent);
2618:     DMPlexAddClosure_Private(dm, ht, parent);
2619:     DMPlexGetCone(dm, parent, &cone);
2620:     DMPlexGetConeSize(dm, parent, &coneSize);
2621:     for (c = 0; c < coneSize; c++) {
2622:       const PetscInt cp = cone[c];

2624:       DMPlexAddClosureTree_Up_Private(dm, ht, cp);
2625:     }
2626:   }
2627:   return(0);
2628: }

2630: static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2631: {
2632:   PetscInt       i, numChildren;
2633:   const PetscInt *children;

2637:   DMPlexGetTreeChildren(dm, point, &numChildren, &children);
2638:   for (i = 0; i < numChildren; i++) {
2639:     PetscHSetIAdd(ht, children[i]);
2640:   }
2641:   return(0);
2642: }

2644: static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2645: {
2646:   const PetscInt *cone;
2647:   PetscInt       coneSize, c;

2651:   PetscHSetIAdd(ht, point);
2652:   DMPlexAddClosureTree_Up_Private(dm, ht, point);
2653:   DMPlexAddClosureTree_Down_Private(dm, ht, point);
2654:   DMPlexGetCone(dm, point, &cone);
2655:   DMPlexGetConeSize(dm, point, &coneSize);
2656:   for (c = 0; c < coneSize; c++) {
2657:     DMPlexAddClosureTree_Private(dm, ht, cone[c]);
2658:   }
2659:   return(0);
2660: }

2662: PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2663: {
2664:   DM_Plex         *mesh = (DM_Plex *)dm->data;
2665:   const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2666:   PetscInt        nelems, *elems, off = 0, p;
2667:   PetscHSetI      ht;
2668:   PetscErrorCode  ierr;

2671:   PetscHSetICreate(&ht);
2672:   PetscHSetIResize(ht, numPoints*16);
2673:   if (!hasTree) {
2674:     for (p = 0; p < numPoints; ++p) {
2675:       DMPlexAddClosure_Private(dm, ht, points[p]);
2676:     }
2677:   } else {
2678: #if 1
2679:     for (p = 0; p < numPoints; ++p) {
2680:       DMPlexAddClosureTree_Private(dm, ht, points[p]);
2681:     }
2682: #else
2683:     PetscInt  *closure = NULL, closureSize, c;
2684:     for (p = 0; p < numPoints; ++p) {
2685:       DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
2686:       for (c = 0; c < closureSize*2; c += 2) {
2687:         PetscHSetIAdd(ht, closure[c]);
2688:         if (hasTree) {DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);}
2689:       }
2690:     }
2691:     if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);}
2692: #endif
2693:   }
2694:   PetscHSetIGetSize(ht, &nelems);
2695:   PetscMalloc1(nelems, &elems);
2696:   PetscHSetIGetElems(ht, &off, elems);
2697:   PetscHSetIDestroy(&ht);
2698:   PetscSortInt(nelems, elems);
2699:   ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);
2700:   return(0);
2701: }

2703: /*@
2704:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

2706:   Input Parameters:
2707: + dm     - The DM
2708: - label  - DMLabel assigning ranks to remote roots

2710:   Level: developer

2712: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2713: @*/
2714: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2715: {
2716:   IS              rankIS,   pointIS, closureIS;
2717:   const PetscInt *ranks,   *points;
2718:   PetscInt        numRanks, numPoints, r;
2719:   PetscErrorCode  ierr;

2722:   DMLabelGetValueIS(label, &rankIS);
2723:   ISGetLocalSize(rankIS, &numRanks);
2724:   ISGetIndices(rankIS, &ranks);
2725:   for (r = 0; r < numRanks; ++r) {
2726:     const PetscInt rank = ranks[r];
2727:     DMLabelGetStratumIS(label, rank, &pointIS);
2728:     ISGetLocalSize(pointIS, &numPoints);
2729:     ISGetIndices(pointIS, &points);
2730:     DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);
2731:     ISRestoreIndices(pointIS, &points);
2732:     ISDestroy(&pointIS);
2733:     DMLabelSetStratumIS(label, rank, closureIS);
2734:     ISDestroy(&closureIS);
2735:   }
2736:   ISRestoreIndices(rankIS, &ranks);
2737:   ISDestroy(&rankIS);
2738:   return(0);
2739: }
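
/*
  Usage sketch (illustrative): mark a cell for migration to rank 1 and complete the label
  with its closure.  The label name, target rank, and routine name are hypothetical.
*/
static PetscErrorCode UserLabelClosure_Sketch(DM dm, PetscInt cell)
{
  DMLabel label;

  DMLabelCreate(PETSC_COMM_SELF, "migration", &label);
  DMLabelSetValue(label, cell, 1);        /* send this cell to rank 1 */
  DMPlexPartitionLabelClosure(dm, label); /* add its closure points with the same rank */
  DMLabelDestroy(&label);
  return(0);
}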

2741: /*@
2742:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

2744:   Input Parameters:
2745: + dm     - The DM
2746: - label  - DMLabel assigning ranks to remote roots

2748:   Level: developer

2750: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2751: @*/
2752: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2753: {
2754:   IS              rankIS,   pointIS;
2755:   const PetscInt *ranks,   *points;
2756:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2757:   PetscInt       *adj = NULL;
2758:   PetscErrorCode  ierr;

2761:   DMLabelGetValueIS(label, &rankIS);
2762:   ISGetLocalSize(rankIS, &numRanks);
2763:   ISGetIndices(rankIS, &ranks);
2764:   for (r = 0; r < numRanks; ++r) {
2765:     const PetscInt rank = ranks[r];

2767:     DMLabelGetStratumIS(label, rank, &pointIS);
2768:     ISGetLocalSize(pointIS, &numPoints);
2769:     ISGetIndices(pointIS, &points);
2770:     for (p = 0; p < numPoints; ++p) {
2771:       adjSize = PETSC_DETERMINE;
2772:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
2773:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
2774:     }
2775:     ISRestoreIndices(pointIS, &points);
2776:     ISDestroy(&pointIS);
2777:   }
2778:   ISRestoreIndices(rankIS, &ranks);
2779:   ISDestroy(&rankIS);
2780:   PetscFree(adj);
2781:   return(0);
2782: }

2784: /*@
2785:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

2787:   Input Parameters:
2788: + dm     - The DM
2789: - label  - DMLabel assigning ranks to remote roots

2791:   Level: developer

2793:   Note: This is required when generating multi-level overlaps to capture
2794:   overlap points from non-neighbouring partitions.

2796: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2797: @*/
2798: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2799: {
2800:   MPI_Comm        comm;
2801:   PetscMPIInt     rank;
2802:   PetscSF         sfPoint;
2803:   DMLabel         lblRoots, lblLeaves;
2804:   IS              rankIS, pointIS;
2805:   const PetscInt *ranks;
2806:   PetscInt        numRanks, r;
2807:   PetscErrorCode  ierr;

2810:   PetscObjectGetComm((PetscObject) dm, &comm);
2811:   MPI_Comm_rank(comm, &rank);
2812:   DMGetPointSF(dm, &sfPoint);
2813:   /* Pull point contributions from remote leaves into local roots */
2814:   DMLabelGather(label, sfPoint, &lblLeaves);
2815:   DMLabelGetValueIS(lblLeaves, &rankIS);
2816:   ISGetLocalSize(rankIS, &numRanks);
2817:   ISGetIndices(rankIS, &ranks);
2818:   for (r = 0; r < numRanks; ++r) {
2819:     const PetscInt remoteRank = ranks[r];
2820:     if (remoteRank == rank) continue;
2821:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2822:     DMLabelInsertIS(label, pointIS, remoteRank);
2823:     ISDestroy(&pointIS);
2824:   }
2825:   ISRestoreIndices(rankIS, &ranks);
2826:   ISDestroy(&rankIS);
2827:   DMLabelDestroy(&lblLeaves);
2828:   /* Push point contributions from roots into remote leaves */
2829:   DMLabelDistribute(label, sfPoint, &lblRoots);
2830:   DMLabelGetValueIS(lblRoots, &rankIS);
2831:   ISGetLocalSize(rankIS, &numRanks);
2832:   ISGetIndices(rankIS, &ranks);
2833:   for (r = 0; r < numRanks; ++r) {
2834:     const PetscInt remoteRank = ranks[r];
2835:     if (remoteRank == rank) continue;
2836:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2837:     DMLabelInsertIS(label, pointIS, remoteRank);
2838:     ISDestroy(&pointIS);
2839:   }
2840:   ISRestoreIndices(rankIS, &ranks);
2841:   ISDestroy(&rankIS);
2842:   DMLabelDestroy(&lblRoots);
2843:   return(0);
2844: }
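
/*
  Usage sketch (illustrative): when growing a k-level overlap, each adjacency step is
  typically followed by a propagation over the point SF so that contributions from
  non-neighbouring ranks are captured:

    for (l = 0; l < k; ++l) {
      DMPlexPartitionLabelAdjacency(dm, label);
      DMPlexPartitionLabelPropagate(dm, label);
    }
*/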

2846: /*@
2847:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

2849:   Input Parameters:
2850: + dm        - The DM
2851: . rootLabel - DMLabel assigning ranks to local roots
2852: - processSF - A star forest mapping into the local index on each remote rank

2854:   Output Parameter:
2855: . leafLabel - DMLabel assigning ranks to remote roots

2857:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2858:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

2860:   Level: developer

2862: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2863: @*/
2864: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2865: {
2866:   MPI_Comm           comm;
2867:   PetscMPIInt        rank, size, r;
2868:   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2869:   PetscSF            sfPoint;
2870:   PetscSection       rootSection;
2871:   PetscSFNode       *rootPoints, *leafPoints;
2872:   const PetscSFNode *remote;
2873:   const PetscInt    *local, *neighbors;
2874:   IS                 valueIS;
2875:   PetscBool          mpiOverflow = PETSC_FALSE;
2876:   PetscErrorCode     ierr;

2879:   PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);
2880:   PetscObjectGetComm((PetscObject) dm, &comm);
2881:   MPI_Comm_rank(comm, &rank);
2882:   MPI_Comm_size(comm, &size);
2883:   DMGetPointSF(dm, &sfPoint);

2885:   /* Convert to (point, rank) and use actual owners */
2886:   PetscSectionCreate(comm, &rootSection);
2887:   PetscSectionSetChart(rootSection, 0, size);
2888:   DMLabelGetValueIS(rootLabel, &valueIS);
2889:   ISGetLocalSize(valueIS, &numNeighbors);
2890:   ISGetIndices(valueIS, &neighbors);
2891:   for (n = 0; n < numNeighbors; ++n) {
2892:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2893:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2894:   }
2895:   PetscSectionSetUp(rootSection);
2896:   PetscSectionGetStorageSize(rootSection, &rootSize);
2897:   PetscMalloc1(rootSize, &rootPoints);
2898:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2899:   for (n = 0; n < numNeighbors; ++n) {
2900:     IS              pointIS;
2901:     const PetscInt *points;

2903:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
2904:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2905:     ISGetLocalSize(pointIS, &numPoints);
2906:     ISGetIndices(pointIS, &points);
2907:     for (p = 0; p < numPoints; ++p) {
2908:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2909:       else       {l = -1;}
2910:       if (l >= 0) {rootPoints[off+p] = remote[l];}
2911:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2912:     }
2913:     ISRestoreIndices(pointIS, &points);
2914:     ISDestroy(&pointIS);
2915:   }

2917:   /* Try to communicate overlap using All-to-All */
2918:   if (!processSF) {
2919:     PetscInt64  counter = 0;
2920:     PetscBool   locOverflow = PETSC_FALSE;
2921:     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

2923:     PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);
2924:     for (n = 0; n < numNeighbors; ++n) {
2925:       PetscSectionGetDof(rootSection, neighbors[n], &dof);
2926:       PetscSectionGetOffset(rootSection, neighbors[n], &off);
2927: #if defined(PETSC_USE_64BIT_INDICES)
2928:       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2929:       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2930: #endif
2931:       scounts[neighbors[n]] = (PetscMPIInt) dof;
2932:       sdispls[neighbors[n]] = (PetscMPIInt) off;
2933:     }
2934:     MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);
2935:     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2936:     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2937:     MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);
2938:     if (!mpiOverflow) {
2939:       PetscInfo(dm,"Using Alltoallv for mesh distribution\n");
2940:       leafSize = (PetscInt) counter;
2941:       PetscMalloc1(leafSize, &leafPoints);
2942:       MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);
2943:     }
2944:     PetscFree4(scounts, sdispls, rcounts, rdispls);
2945:   }

2947:   /* Communicate overlap using process star forest */
2948:   if (processSF || mpiOverflow) {
2949:     PetscSF      procSF;
2950:     PetscSection leafSection;

2952:     if (processSF) {
2953:       PetscInfo(dm,"Using processSF for mesh distribution\n");
2954:       PetscObjectReference((PetscObject)processSF);
2955:       procSF = processSF;
2956:     } else {
2957:       PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");
2958:       PetscSFCreate(comm,&procSF);
2959:       PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);
2960:     }

2962:     PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);
2963:     DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2964:     PetscSectionGetStorageSize(leafSection, &leafSize);
2965:     PetscSectionDestroy(&leafSection);
2966:     PetscSFDestroy(&procSF);
2967:   }

2969:   for (p = 0; p < leafSize; p++) {
2970:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2971:   }

2973:   ISRestoreIndices(valueIS, &neighbors);
2974:   ISDestroy(&valueIS);
2975:   PetscSectionDestroy(&rootSection);
2976:   PetscFree(rootPoints);
2977:   PetscFree(leafPoints);
2978:   PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);
2979:   return(0);
2980: }
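/* The label inversion ending above (the body of DMPlexPartitionLabelInvert()) takes a
   sender-side rootLabel, whose stratum r holds the local points assigned to rank r, and
   fills leafLabel on the receiving side: each transferred point is recorded under its
   owner's rank, identified by its index in the owner's local numbering.  That is the
   form expected by DMPlexPartitionLabelCreateSF() below.  Passing processSF = NULL
   selects the Alltoallv path unless the message sizes overflow PetscMPIInt. */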

2982: /*@
2983:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

2985:   Input Parameters:
2986: + dm    - The DM
2987: - label - DMLabel assigning ranks to remote roots

2989:   Output Parameter:
2990: . sf    - The star forest communication context encapsulating the defined mapping

2992:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

2994:   Level: developer

2996: .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2997: @*/
2998: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2999: {
3000:   PetscMPIInt     rank;
3001:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
3002:   PetscSFNode    *remotePoints;
3003:   IS              remoteRootIS, neighborsIS;
3004:   const PetscInt *remoteRoots, *neighbors;

3008:   PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);
3009:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);

3011:   DMLabelGetValueIS(label, &neighborsIS);
3012: #if 0
3013:   {
3014:     IS is;
3015:     ISDuplicate(neighborsIS, &is);
3016:     ISSort(is);
3017:     ISDestroy(&neighborsIS);
3018:     neighborsIS = is;
3019:   }
3020: #endif
3021:   ISGetLocalSize(neighborsIS, &nNeighbors);
3022:   ISGetIndices(neighborsIS, &neighbors);
3023:   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
3024:     DMLabelGetStratumSize(label, neighbors[n], &numPoints);
3025:     numRemote += numPoints;
3026:   }
3027:   PetscMalloc1(numRemote, &remotePoints);
3028:   /* Put owned points first */
3029:   DMLabelGetStratumSize(label, rank, &numPoints);
3030:   if (numPoints > 0) {
3031:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
3032:     ISGetIndices(remoteRootIS, &remoteRoots);
3033:     for (p = 0; p < numPoints; p++) {
3034:       remotePoints[idx].index = remoteRoots[p];
3035:       remotePoints[idx].rank = rank;
3036:       idx++;
3037:     }
3038:     ISRestoreIndices(remoteRootIS, &remoteRoots);
3039:     ISDestroy(&remoteRootIS);
3040:   }
3041:   /* Now add remote points */
3042:   for (n = 0; n < nNeighbors; ++n) {
3043:     const PetscInt nn = neighbors[n];

3045:     DMLabelGetStratumSize(label, nn, &numPoints);
3046:     if (nn == rank || numPoints <= 0) continue;
3047:     DMLabelGetStratumIS(label, nn, &remoteRootIS);
3048:     ISGetIndices(remoteRootIS, &remoteRoots);
3049:     for (p = 0; p < numPoints; p++) {
3050:       remotePoints[idx].index = remoteRoots[p];
3051:       remotePoints[idx].rank = nn;
3052:       idx++;
3053:     }
3054:     ISRestoreIndices(remoteRootIS, &remoteRoots);
3055:     ISDestroy(&remoteRootIS);
3056:   }
3057:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
3058:   DMPlexGetChart(dm, &pStart, &pEnd);
3059:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
3060:   PetscSFSetUp(*sf);
3061:   ISDestroy(&neighborsIS);
3062:   PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);
3063:   return(0);
3064: }
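/* A minimal sketch of the call sequence that typically produces and consumes this SF,
   mirroring what DMPlexDistribute() does internally (the names dm, lblPartition and
   lblMigration are assumptions made for illustration):

     DMLabel lblPartition, lblMigration;
     PetscSF sfMigration;

     // lblPartition is a sender-side label whose stratum r holds the local points to be
     // sent to rank r; it would be created and filled by a PetscPartitioner elsewhere.
     DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);
     DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);
     DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);
     PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);
     PetscSFDestroy(&sfMigration);
     DMLabelDestroy(&lblMigration); */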

3066: /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3067:  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3068:  * them out in that case. */
3069: #if defined(PETSC_HAVE_PARMETIS)
3070: /*@C

3072:   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).

3074:   Input Parameters:
3075: + dm                - The DMPlex object.
3076: . n                 - The number of points.
3077: . pointsToRewrite   - The points in the SF whose ownership will change.
3078: . targetOwners      - New owner for each element in pointsToRewrite.
3079: - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.

3081:   Level: developer

3083: @*/
3084: static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3085: {
3086:   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3087:   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3088:   PetscSFNode  *leafLocationsNew;
3089:   const         PetscSFNode *iremote;
3090:   const         PetscInt *ilocal;
3091:   PetscBool    *isLeaf;
3092:   PetscSF       sf;
3093:   MPI_Comm      comm;
3094:   PetscMPIInt   rank, size;

3097:   PetscObjectGetComm((PetscObject) dm, &comm);
3098:   MPI_Comm_rank(comm, &rank);
3099:   MPI_Comm_size(comm, &size);
3100:   DMPlexGetChart(dm, &pStart, &pEnd);

3102:   DMGetPointSF(dm, &sf);
3103:   PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); 
3104:   PetscMalloc1(pEnd-pStart, &isLeaf);
3105:   for (i=0; i<pEnd-pStart; i++) {
3106:     isLeaf[i] = PETSC_FALSE;
3107:   }
3108:   for (i=0; i<nleafs; i++) {
3109:     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3110:   }

3112:   PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);
3113:   cumSumDegrees[0] = 0;
3114:   for (i=1; i<=pEnd-pStart; i++) {
3115:     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3116:   }
3117:   sumDegrees = cumSumDegrees[pEnd-pStart];
3118:   /* Get the ranks of the leaves attached to my roots (there are sumDegrees of them in total) */

3120:   PetscMalloc1(sumDegrees, &locationsOfLeafs);
3121:   PetscMalloc1(pEnd-pStart, &rankOnLeafs);
3122:   for (i=0; i<pEnd-pStart; i++) {
3123:     rankOnLeafs[i] = rank;
3124:   }
3125:   PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);
3126:   PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);
3127:   PetscFree(rankOnLeafs);

3129:   /* get the remote local points of my leaves */
3130:   PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);
3131:   PetscMalloc1(pEnd-pStart, &points);
3132:   for (i=0; i<pEnd-pStart; i++) {
3133:     points[i] = pStart+i;
3134:   }
3135:   PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);
3136:   PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);
3137:   PetscFree(points);
3138:   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3139:   PetscMalloc1(pEnd-pStart, &newOwners);
3140:   PetscMalloc1(pEnd-pStart, &newNumbers);
3141:   for (i=0; i<pEnd-pStart; i++) {
3142:     newOwners[i] = -1;
3143:     newNumbers[i] = -1;
3144:   }
3145:   {
3146:     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3147:     for (i=0; i<n; i++) {
3148:       oldNumber = pointsToRewrite[i];
3149:       newNumber = -1;
3150:       oldOwner = rank;
3151:       newOwner = targetOwners[i];
3152:       if (oldOwner == newOwner) {
3153:         newNumber = oldNumber;
3154:       } else {
3155:         for (j=0; j<degrees[oldNumber]; j++) {
3156:           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3157:             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3158:             break;
3159:           }
3160:         }
3161:       }
3162:       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Could not find the new owner of a vertex.");

3164:       newOwners[oldNumber] = newOwner;
3165:       newNumbers[oldNumber] = newNumber;
3166:     }
3167:   }
3168:   PetscFree(cumSumDegrees);
3169:   PetscFree(locationsOfLeafs);
3170:   PetscFree(remoteLocalPointOfLeafs);

3172:   PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);
3173:   PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);
3174:   PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);
3175:   PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);

3177:   /* Now count how many leaves we have on each process. */
3178:   leafCounter=0;
3179:   for (i=0; i<pEnd-pStart; i++) {
3180:     if (newOwners[i] >= 0) {
3181:       if (newOwners[i] != rank) {
3182:         leafCounter++;
3183:       }
3184:     } else {
3185:       if (isLeaf[i]) {
3186:         leafCounter++;
3187:       }
3188:     }
3189:   }

3191:   /* Now set up the new sf by creating the leaf arrays */
3192:   PetscMalloc1(leafCounter, &leafsNew);
3193:   PetscMalloc1(leafCounter, &leafLocationsNew);

3195:   leafCounter = 0;
3196:   counter = 0;
3197:   for (i=0; i<pEnd-pStart; i++) {
3198:     if (newOwners[i] >= 0) {
3199:       if (newOwners[i] != rank) {
3200:         leafsNew[leafCounter] = i;
3201:         leafLocationsNew[leafCounter].rank = newOwners[i];
3202:         leafLocationsNew[leafCounter].index = newNumbers[i];
3203:         leafCounter++;
3204:       }
3205:     } else {
3206:       if (isLeaf[i]) {
3207:         leafsNew[leafCounter] = i;
3208:         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3209:         leafLocationsNew[leafCounter].index = iremote[counter].index;
3210:         leafCounter++;
3211:       }
3212:     }
3213:     if (isLeaf[i]) {
3214:       counter++;
3215:     }
3216:   }

3218:   PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);
3219:   PetscFree(newOwners);
3220:   PetscFree(newNumbers);
3221:   PetscFree(isLeaf);
3222:   return(0);
3223: }
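/* A minimal sketch of how the "degrees" argument of DMPlexRewriteSF() is obtained from
   the point SF (assuming a DM "dm"; this mirrors the calls made by
   DMPlexRebalanceSharedPoints() below):

     PetscSF         sf;
     const PetscInt *degrees;

     DMGetPointSF(dm, &sf);
     PetscSFComputeDegreeBegin(sf, &degrees);
     PetscSFComputeDegreeEnd(sf, &degrees);
     // degrees[p] is the number of remote leaves attached to local root p; it is 0 for
     // points that no other process sees.  The array is owned by the SF and must not be
     // freed by the caller. */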

3225: static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3226: {
3227:   PetscInt *distribution, min, max, sum, i, ierr;
3228:   PetscMPIInt rank, size;
3230:   MPI_Comm_size(comm, &size);
3231:   MPI_Comm_rank(comm, &rank);
3232:   PetscCalloc1(size, &distribution);
3233:   for (i=0; i<n; i++) {
3234:     if (part) distribution[part[i]] += vtxwgt[skip*i];
3235:     else distribution[rank] += vtxwgt[skip*i];
3236:   }
3237:   MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);
3238:   min = distribution[0];
3239:   max = distribution[0];
3240:   sum = distribution[0];
3241:   for (i=1; i<size; i++) {
3242:     if (distribution[i]<min) min=distribution[i];
3243:     if (distribution[i]>max) max=distribution[i];
3244:     sum += distribution[i];
3245:   }
3246:   PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);
3247:   PetscFree(distribution);
3248:   return(0);
3249: }
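/* A worked example of the summary printed above (hypothetical numbers): with size = 4
   and per-rank weighted counts {10, 20, 30, 40}, the routine reports
     Min: 10, Avg: 25, Max: 40, Balance: 1.600000
   since sum = 100, avg = sum/size = 25 and balance = max*size/sum = 40*4/100 = 1.6.
   A perfectly balanced distribution has Balance = 1.0. */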

3251: #endif

3253: /*@
3254:   DMPlexRebalanceSharedPoints - Redistribute shared points of the plex in order to achieve better balancing. This routine updates the PointSF of the DM in place.

3256:   Input Parameters:
3257: + dm               - The DMPlex object.
3258: . entityDepth      - depth of the entity to balance (0 -> balance vertices).
3259: . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
3260: - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

3262:   Output Parameter:
3263: . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=PETSC_TRUE and parallel=PETSC_TRUE.

3265:   Level: intermediate

3267: @*/

3269: PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3270: {
3271: #if defined(PETSC_HAVE_PARMETIS)
3272:   PetscSF     sf;
3273:   PetscInt    ierr, i, j, idx, jdx;
3274:   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3275:   const       PetscInt *degrees, *ilocal;
3276:   const       PetscSFNode *iremote;
3277:   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3278:   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3279:   PetscMPIInt rank, size;
3280:   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3281:   const       PetscInt *cumSumVertices;
3282:   PetscInt    offset, counter;
3283:   PetscInt    lenadjncy;
3284:   PetscInt    *xadj, *adjncy, *vtxwgt;
3285:   PetscInt    lenxadj;
3286:   PetscInt    *adjwgt = NULL;
3287:   PetscInt    *part, *options;
3288:   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3289:   real_t      *ubvec;
3290:   PetscInt    *firstVertices, *renumbering;
3291:   PetscInt    failed, failedGlobal;
3292:   MPI_Comm    comm;
3293:   Mat         A, Apre;
3294:   const char *prefix = NULL;
3295:   PetscViewer       viewer;
3296:   PetscViewerFormat format;
3297:   PetscLayout layout;

3300:   if (success) *success = PETSC_FALSE;
3301:   PetscObjectGetComm((PetscObject) dm, &comm);
3302:   MPI_Comm_rank(comm, &rank);
3303:   MPI_Comm_size(comm, &size);
3304:   if (size==1) return(0);

3306:   PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);

3308:   PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);
3309:   if (viewer) {
3310:     PetscViewerPushFormat(viewer,format);
3311:   }

3313:   /* Figure out all points in the plex that we are interested in balancing. */
3314:   DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);
3315:   DMPlexGetChart(dm, &pStart, &pEnd);
3316:   PetscMalloc1(pEnd-pStart, &toBalance);

3318:   for (i=0; i<pEnd-pStart; i++) {
3319:     toBalance[i] = (PetscBool)(i+pStart >= eBegin && i+pStart < eEnd);
3320:   }

3322:   /* There are three types of points:
3323:    * exclusivelyOwned: points that are owned by this process and only seen by this process
3324:    * nonExclusivelyOwned: points that are owned by this process but also seen by at least one other process
3325:    * leaf: a point that is seen by this process but owned by a different process
3326:    */
3327:   DMGetPointSF(dm, &sf);
3328:   PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); 
3329:   PetscMalloc1(pEnd-pStart, &isLeaf);
3330:   PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);
3331:   PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);
3332:   for (i=0; i<pEnd-pStart; i++) {
3333:     isNonExclusivelyOwned[i] = PETSC_FALSE;
3334:     isExclusivelyOwned[i] = PETSC_FALSE;
3335:     isLeaf[i] = PETSC_FALSE;
3336:   }

3338:   /* Start by marking all the leaves */
3339:   for (i=0; i<nleafs; i++) {
3340:     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3341:   }

3343:   /* for an owned point, we can figure out whether another processor sees it or
3344:    * not by calculating its degree */
3345:   PetscSFComputeDegreeBegin(sf, &degrees);
3346:   PetscSFComputeDegreeEnd(sf, &degrees);

3348:   numExclusivelyOwned = 0;
3349:   numNonExclusivelyOwned = 0;
3350:   for (i=0; i<pEnd-pStart; i++) {
3351:     if (toBalance[i]) {
3352:       if (degrees[i] > 0) {
3353:         isNonExclusivelyOwned[i] = PETSC_TRUE;
3354:         numNonExclusivelyOwned += 1;
3355:       } else {
3356:         if (!isLeaf[i]) {
3357:           isExclusivelyOwned[i] = PETSC_TRUE;
3358:           numExclusivelyOwned += 1;
3359:         }
3360:       }
3361:     }
3362:   }
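  /* A small worked example of the classification above (hypothetical, two ranks sharing
     a single vertex v at the requested depth): on the owning rank degrees[v] = 1 > 0, so
     v is nonExclusivelyOwned; on the other rank v is a leaf of the point SF, so it is
     counted in neither set there.  Interior points, with degree 0 and not leaves, are
     exclusivelyOwned. */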

3364:   /* We are going to build a graph with one vertex per core representing the
3365:    * exclusively owned points and then one vertex per nonExclusively owned
3366:    * point. */

3368:   PetscLayoutCreate(comm, &layout);
3369:   PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);
3370:   PetscLayoutSetUp(layout);
3371:   PetscLayoutGetRanges(layout, &cumSumVertices);

3373:   PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);
3374:   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3375:   offset = cumSumVertices[rank];
3376:   counter = 0;
3377:   for (i=0; i<pEnd-pStart; i++) {
3378:     if (toBalance[i]) {
3379:       if (degrees[i] > 0) {
3380:         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3381:         counter++;
3382:       }
3383:     }
3384:   }
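  /* A worked example of the numbering set up above (hypothetical): with two ranks, where
     rank 0 has 3 non-exclusively owned points and rank 1 has 2, the layout has local
     sizes {1+3, 1+2} and cumSumVertices = {0, 4, 7}.  Graph vertex 0 (resp. 4) is the
     super-vertex standing for all points exclusively owned by rank 0 (resp. rank 1),
     while vertices 1..3 and 5..6 stand for the individual shared points. */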

3386:   /* Send the global numbers of the vertices I own to the leaves so that they know which graph vertex to connect to */
3387:   PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);
3388:   PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);
3389:   PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);

3391:   /* Now start building the data structure for ParMETIS */

3393:   MatCreate(comm, &Apre);
3394:   MatSetType(Apre, MATPREALLOCATOR);
3395:   MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);
3396:   MatSetUp(Apre);

3398:   for (i=0; i<pEnd-pStart; i++) {
3399:     if (toBalance[i]) {
3400:       idx = cumSumVertices[rank];
3401:       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3402:       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3403:       else continue;
3404:       MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);
3405:       MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);
3406:     }
3407:   }

3409:   MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);
3410:   MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);

3412:   MatCreate(comm, &A);
3413:   MatSetType(A, MATMPIAIJ);
3414:   MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);
3415:   MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);
3416:   MatDestroy(&Apre);
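  /* The Apre/A pair follows the generic MATPREALLOCATOR idiom: the same insertion loop is
     run twice, first into a MATPREALLOCATOR matrix that only records the nonzero pattern,
     then into the real matrix that has been preallocated exactly from that pattern.  A
     minimal sketch of the idiom, with illustrative names (m/M local and global sizes):

       Mat preall, B;
       MatCreate(comm, &preall);
       MatSetType(preall, MATPREALLOCATOR);
       MatSetSizes(preall, m, m, M, M);
       MatSetUp(preall);
       // ... MatSetValue(preall, i, j, 1.0, INSERT_VALUES) for every nonzero ...
       MatAssemblyBegin(preall, MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(preall, MAT_FINAL_ASSEMBLY);
       MatCreate(comm, &B);
       MatSetType(B, MATMPIAIJ);
       MatSetSizes(B, m, m, M, M);
       MatPreallocatorPreallocate(preall, PETSC_FALSE, B);
       MatDestroy(&preall);
       // ... repeat the same MatSetValue() loop on B, then assemble B ... */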

3418:   for (i=0; i<pEnd-pStart; i++) {
3419:     if (toBalance[i]) {
3420:       idx = cumSumVertices[rank];
3421:       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3422:       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3423:       else continue;
3424:       MatSetValue(A, idx, jdx, 1, INSERT_VALUES);
3425:       MatSetValue(A, jdx, idx, 1, INSERT_VALUES);
3426:     }
3427:   }

3429:   MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
3430:   MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
3431:   PetscFree(leafGlobalNumbers);
3432:   PetscFree(globalNumbersOfLocalOwnedVertices);

3434:   nparts = size;
3435:   wgtflag = 2;
3436:   numflag = 0;
3437:   ncon = 2;
3438:   real_t *tpwgts;
3439:   PetscMalloc1(ncon * nparts, &tpwgts);
3440:   for (i=0; i<ncon*nparts; i++) {
3441:     tpwgts[i] = 1./(nparts);
3442:   }

3444:   PetscMalloc1(ncon, &ubvec);
3445:   ubvec[0] = 1.01;
3446:   ubvec[1] = 1.01;
3447:   lenadjncy = 0;
3448:   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3449:     PetscInt temp=0;
3450:     MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);
3451:     lenadjncy += temp;
3452:     MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);
3453:   }
3454:   PetscMalloc1(lenadjncy, &adjncy);
3455:   lenxadj = 2 + numNonExclusivelyOwned;
3456:   PetscMalloc1(lenxadj, &xadj);
3457:   xadj[0] = 0;
3458:   counter = 0;
3459:   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3460:     PetscInt        temp=0;
3461:     const PetscInt *cols;
3462:     MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);
3463:     PetscArraycpy(&adjncy[counter], cols, temp);
3464:     counter += temp;
3465:     xadj[i+1] = counter;
3466:     MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);
3467:   }

3469:   PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);
3470:   PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);
3471:   vtxwgt[0] = numExclusivelyOwned;
3472:   if (ncon>1) vtxwgt[1] = 1;
3473:   for (i=0; i<numNonExclusivelyOwned; i++) {
3474:     vtxwgt[ncon*(i+1)] = 1;
3475:     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3476:   }

3478:   if (viewer) {
3479:     PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);
3480:     PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);
3481:   }
3482:   if (parallel) {
3483:     PetscMalloc1(4, &options);
3484:     options[0] = 1;
3485:     options[1] = 0; /* Verbosity */
3486:     options[2] = 0; /* Seed */
3487:     options[3] = PARMETIS_PSR_COUPLED; /* Sub-domains and processors are coupled (sub-domain i stays on rank i) */
3488:     if (viewer) { PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n"); }
3489:     if (useInitialGuess) {
3490:       if (viewer) { PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n"); }
3491:       PetscStackPush("ParMETIS_V3_RefineKway");
3492:       ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3493:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3494:       PetscStackPop;
3495:     } else {
3496:       PetscStackPush("ParMETIS_V3_PartKway");
3497:       ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3498:       PetscStackPop;
3499:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3500:     }
3501:     PetscFree(options);
3502:   } else {
3503:     if (viewer) { PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n"); }
3504:     Mat As;
3505:     PetscInt numRows;
3506:     PetscInt *partGlobal;
3507:     MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);

3509:     PetscInt *numExclusivelyOwnedAll;
3510:     PetscMalloc1(size, &numExclusivelyOwnedAll);
3511:     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3512:     MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);

3514:     MatGetSize(As, &numRows, NULL);
3515:     PetscMalloc1(numRows, &partGlobal);
3516:     if (!rank) {
3517:       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3518:       lenadjncy = 0;

3520:       for (i=0; i<numRows; i++) {
3521:         PetscInt temp=0;
3522:         MatGetRow(As, i, &temp, NULL, NULL);
3523:         lenadjncy += temp;
3524:         MatRestoreRow(As, i, &temp, NULL, NULL);
3525:       }
3526:       PetscMalloc1(lenadjncy, &adjncy_g);
3527:       lenxadj = 1 + numRows;
3528:       PetscMalloc1(lenxadj, &xadj_g);
3529:       xadj_g[0] = 0;
3530:       counter = 0;
3531:       for (i=0; i<numRows; i++) {
3532:         PetscInt        temp=0;
3533:         const PetscInt *cols;
3534:         MatGetRow(As, i, &temp, &cols, NULL);
3535:         PetscArraycpy(&adjncy_g[counter], cols, temp);
3536:         counter += temp;
3537:         xadj_g[i+1] = counter;
3538:         MatRestoreRow(As, i, &temp, &cols, NULL);
3539:       }
3540:       PetscMalloc1(2*numRows, &vtxwgt_g);
3541:       for (i=0; i<size; i++){
3542:         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3543:         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3544:         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3545:           vtxwgt_g[ncon*j] = 1;
3546:           if (ncon>1) vtxwgt_g[ncon*j+1] = 0;
3547:         }
3548:       }
3549:       PetscMalloc1(64, &options);
3550:       METIS_SetDefaultOptions(options); /* initialize all defaults */
3551:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3552:       options[METIS_OPTION_CONTIG] = 1;
3553:       PetscStackPush("METIS_PartGraphKway");
3554:       METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3555:       PetscStackPop;
3556:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3557:       PetscFree(options);
3558:       PetscFree(xadj_g);
3559:       PetscFree(adjncy_g);
3560:       PetscFree(vtxwgt_g);
3561:     }
3562:     PetscFree(numExclusivelyOwnedAll);

3564:     /* Now scatter the parts array. */
3565:     {
3566:       PetscMPIInt *counts, *mpiCumSumVertices;
3567:       PetscMalloc1(size, &counts);
3568:       PetscMalloc1(size+1, &mpiCumSumVertices);
3569:       for(i=0; i<size; i++) {
3570:         PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));
3571:       }
3572:       for(i=0; i<=size; i++) {
3573:         PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));
3574:       }
3575:       MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);
3576:       PetscFree(counts);
3577:       PetscFree(mpiCumSumVertices);
3578:     }

3580:     PetscFree(partGlobal);
3581:     MatDestroy(&As);
3582:   }

3584:   MatDestroy(&A);
3585:   PetscFree(ubvec);
3586:   PetscFree(tpwgts);

3588:   /* Now rename the result so that the vertex representing the exclusively owned points stays on the same rank */

3590:   PetscMalloc1(size, &firstVertices);
3591:   PetscMalloc1(size, &renumbering);
3592:   firstVertices[rank] = part[0];
3593:   MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);
3594:   for (i=0; i<size; i++) {
3595:     renumbering[firstVertices[i]] = i;
3596:   }
3597:   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3598:     part[i] = renumbering[part[i]];
3599:   }
3600:   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3601:   failed = (PetscInt)(part[0] != rank);
3602:   MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);
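  /* A worked example of the renumbering above (hypothetical, three ranks): if part[0] is
     2 on rank 0, 0 on rank 1 and 1 on rank 2, then firstVertices = {2, 0, 1}, so
     renumbering maps 2->0, 0->1, 1->2 and every rank's super-vertex ends up assigned back
     to itself.  The Allreduce catches the cases where no such permutation exists, e.g.
     when the partitioner returned fewer parts than there are ranks. */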

3604:   PetscFree(firstVertices);
3605:   PetscFree(renumbering);

3607:   if (failedGlobal > 0) {
3608:     PetscLayoutDestroy(&layout);
3609:     PetscFree(xadj);
3610:     PetscFree(adjncy);
3611:     PetscFree(vtxwgt);
3612:     PetscFree(toBalance);
3613:     PetscFree(isLeaf);
3614:     PetscFree(isNonExclusivelyOwned);
3615:     PetscFree(isExclusivelyOwned);
3616:     PetscFree(part);
3617:     if (viewer) {
3618:       PetscViewerPopFormat(viewer);
3619:       PetscViewerDestroy(&viewer);
3620:     }
3621:     PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);
3622:     return(0);
3623:   }

3625:   /* Let's check how well we did distributing points */
3626:   if (viewer) {
3627:     PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);
3628:     PetscViewerASCIIPrintf(viewer, "Initial.     ");
3629:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);
3630:     PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");
3631:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);
3632:   }

3634:   /* Now check that every vertex is owned by a process that it is actually connected to. */
3635:   for (i=1; i<=numNonExclusivelyOwned; i++) {
3636:     PetscInt loc = 0;
3637:     PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);
3638:     /* If not, then just set the owner back to the original owner (hopefully a rare event; it means that a vertex has been isolated) */
3639:     if (loc<0) {
3640:       part[i] = rank;
3641:     }
3642:   }
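  /* PetscFindInt() above does a binary search, which is valid here because MatGetRow()
     returned each adjacency list in ascending column order and that order was preserved
     when it was copied into adjncy. */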

3644:   /* Let's see how significant the influence of the previous fixing-up step was. */
3645:   if (viewer) {
3646:     PetscViewerASCIIPrintf(viewer, "After.       ");
3647:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);
3648:   }

3650:   PetscLayoutDestroy(&layout);
3651:   PetscFree(xadj);
3652:   PetscFree(adjncy);
3653:   PetscFree(vtxwgt);

3655:   /* Almost done, now rewrite the SF to reflect the new ownership. */
3656:   {
3657:     PetscInt *pointsToRewrite;
3658:     PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);
3659:     counter = 0;
3660:     for(i=0; i<pEnd-pStart; i++) {
3661:       if (toBalance[i]) {
3662:         if (isNonExclusivelyOwned[i]) {
3663:           pointsToRewrite[counter] = i + pStart;
3664:           counter++;
3665:         }
3666:       }
3667:     }
3668:     DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);
3669:     PetscFree(pointsToRewrite);
3670:   }

3672:   PetscFree(toBalance);
3673:   PetscFree(isLeaf);
3674:   PetscFree(isNonExclusivelyOwned);
3675:   PetscFree(isExclusivelyOwned);
3676:   PetscFree(part);
3677:   if (viewer) {
3678:     PetscViewerPopFormat(viewer);
3679:     PetscViewerDestroy(&viewer);
3680:   }
3681:   if (success) *success = PETSC_TRUE;
3682:   PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);
3683: #else
3684:   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3685: #endif
3686:   return(0);
3687: }
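/* A minimal usage sketch for DMPlexRebalanceSharedPoints() (the DM "dm" is an assumption
   made for illustration):

     PetscBool success;

     DMPlexRebalanceSharedPoints(dm, 0, PETSC_FALSE, PETSC_TRUE, &success);
     if (!success) { // retry using the current distribution as the initial guess
       DMPlexRebalanceSharedPoints(dm, 0, PETSC_TRUE, PETSC_TRUE, &success);
     }

   Run with -dm_rebalance_partition_view to see the before/after balance statistics
   printed through DMPlexViewDistribution(). */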