Actual source code: dm.c

petsc-dev 2014-04-15
  1: #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
  2: #include <petscsf.h>

  4: PetscClassId  DM_CLASSID;
  5: PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

  7: const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};

 11: /*@
 12:   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

 14:    If you never call DMSetType() it will generate an
 15:    error when you try to use the DM.

 17:   Collective on MPI_Comm

 19:   Input Parameter:
 20: . comm - The communicator for the DM object

 22:   Output Parameter:
 23: . dm - The DM object

 25:   Level: beginner

 27: .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
 28: @*/
 29: PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
 30: {
 31:   DM             v;

 36:   *dm = NULL;
 37:   PetscSysInitializePackage();
 38:   VecInitializePackage();
 39:   MatInitializePackage();
 40:   DMInitializePackage();

 42:   PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);
 43:   PetscMemzero(v->ops, sizeof(struct _DMOps));


 46:   v->ltogmap              = NULL;
 47:   v->ltogmapb             = NULL;
 48:   v->bs                   = 1;
 49:   v->coloringtype         = IS_COLORING_GLOBAL;
 50:   PetscSFCreate(comm, &v->sf);
 51:   PetscSFCreate(comm, &v->defaultSF);
 52:   v->defaultSection       = NULL;
 53:   v->defaultGlobalSection = NULL;
 54:   v->L                    = NULL;
 55:   v->maxCell              = NULL;
 56:   {
 57:     PetscInt i;
 58:     for (i = 0; i < 10; ++i) {
 59:       v->nullspaceConstructors[i] = NULL;
 60:     }
 61:   }
 62:   v->numFields = 0;
 63:   v->fields    = NULL;
 64:   v->dmBC      = NULL;
 65:   v->outputSequenceNum = -1;
 66:   DMSetVecType(v,VECSTANDARD);
 67:   DMSetMatType(v,MATAIJ);
 68:   *dm = v;
 69:   return(0);
 70: }
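
/* Editor's note: a minimal usage sketch for DMCreate()/DMSetType()/DMSetUp(), not part of dm.c.
   DMSHELL is chosen here only because it needs no further sizing calls; any DM type works,
   and error handling is reduced to CHKERRQ(). */
#include <petscdm.h>
int main(int argc,char **argv)
{
  DM             dm;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = DMCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr); /* empty DM; unusable until a type is set */
  ierr = DMSetType(dm,DMSHELL);CHKERRQ(ierr);
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  ierr = DMSetUp(dm);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}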

 74: /*@
 75:   DMClone - Creates a DM object with the same topology as the original.

 77:   Collective on MPI_Comm

 79:   Input Parameter:
 80: . dm - The original DM object

 82:   Output Parameter:
 83: . newdm  - The new DM object

 85:   Level: beginner

 87: .keywords: DM, topology, create
 88: @*/
 89: PetscErrorCode DMClone(DM dm, DM *newdm)
 90: {
 91:   PetscSF        sf;
 92:   Vec            coords;
 93:   void          *ctx;

 99:   DMCreate(PetscObjectComm((PetscObject)dm), newdm);
100:   if (dm->ops->clone) {
101:     (*dm->ops->clone)(dm, newdm);
102:   }
103:   (*newdm)->setupcalled = PETSC_TRUE;
104:   DMGetPointSF(dm, &sf);
105:   DMSetPointSF(*newdm, sf);
106:   DMGetApplicationContext(dm, &ctx);
107:   DMSetApplicationContext(*newdm, ctx);
108:   DMGetCoordinatesLocal(dm, &coords);
109:   if (coords) {
110:     DMSetCoordinatesLocal(*newdm, coords);
111:   } else {
112:     DMGetCoordinates(dm, &coords);
113:     if (coords) {DMSetCoordinates(*newdm, coords);}
114:   }
115:   if (dm->maxCell) {
116:     const PetscReal *maxCell, *L;
117:     DMGetPeriodicity(dm,     &maxCell, &L);
118:     DMSetPeriodicity(*newdm,  maxCell,  L);
119:   }
120:   return(0);
121: }
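
/* Editor's note: usage sketch for DMClone(), not part of dm.c.  A clone shares the topology of
   the original DM but can carry its own sections and vectors, e.g. for an auxiliary field. */
#include <petscdm.h>
static PetscErrorCode CloneForAuxiliaryData(DM dm,DM *auxdm)
{
  PetscErrorCode ierr;

  ierr = DMClone(dm,auxdm);CHKERRQ(ierr);
  /* attach a different default section, vector type, etc. to *auxdm here */
  return 0;
}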

125: /*@C
126:        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

128:    Logically Collective on DMDA

130:    Input Parameters:
131: +  da - initial distributed array
132: -  ctype - the vector type, currently either VECSTANDARD or VECCUSP

134:    Options Database:
135: .   -dm_vec_type ctype

137:    Level: intermediate

139: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
140: @*/
141: PetscErrorCode  DMSetVecType(DM da,VecType ctype)
142: {

147:   PetscFree(da->vectype);
148:   PetscStrallocpy(ctype,(char**)&da->vectype);
149:   return(0);
150: }

154: /*@C
155:        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

157:    Logically Collective on DMDA

159:    Input Parameter:
160: .  da - initial distributed array

162:    Output Parameter:
163: .  ctype - the vector type

165:    Level: intermediate

167: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
168: @*/
169: PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
170: {
173:   *ctype = da->vectype;
174:   return(0);
175: }
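
/* Editor's note: sketch of DMSetVecType()/DMGetVecType(), not part of dm.c.  The type set here
   (or via -dm_vec_type) determines what DMCreateGlobalVector()/DMCreateLocalVector() return. */
#include <petscdm.h>
static PetscErrorCode UseStandardVectors(DM dm)
{
  VecType        vtype;
  Vec            x;
  PetscErrorCode ierr;

  ierr = DMSetVecType(dm,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMGetVecType(dm,&vtype);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"DM vector type: %s\n",vtype);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(dm,&x);CHKERRQ(ierr);   /* a vector of the chosen type */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  return 0;
}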

179: /*@
180:   VecGetDM - Gets the DM defining the data layout of the vector

182:   Not collective

184:   Input Parameter:
185: . v - The Vec

187:   Output Parameter:
188: . dm - The DM

190:   Level: intermediate

192: .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
193: @*/
194: PetscErrorCode VecGetDM(Vec v, DM *dm)
195: {

201:   PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);
202:   return(0);
203: }

207: /*@
208:   VecSetDM - Sets the DM defining the data layout of the vector

210:   Not collective

212:   Input Parameters:
213: + v - The Vec
214: - dm - The DM

216:   Level: intermediate

218: .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
219: @*/
220: PetscErrorCode VecSetDM(Vec v, DM dm)
221: {

227:   PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);
228:   return(0);
229: }
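
/* Editor's note: sketch of VecGetDM(), not part of dm.c.  Vectors produced by
   DMCreateGlobalVector()/DMCreateLocalVector() typically have their DM composed on them
   (e.g. for DMDA), so the DM can be recovered later without passing it around. */
#include <petscdm.h>
static PetscErrorCode RecoverDM(Vec g)
{
  DM             dm;
  PetscErrorCode ierr;

  ierr = VecGetDM(g,&dm);CHKERRQ(ierr);   /* borrowed reference; dm may be NULL if none was set */
  if (dm) {ierr = PetscPrintf(PetscObjectComm((PetscObject)g),"Vec has an attached DM\n");CHKERRQ(ierr);}
  return 0;
}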

233: /*@C
234:        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

236:    Logically Collective on DM

238:    Input Parameters:
239: +  dm - the DM context
240: -  ctype - the matrix type

242:    Options Database:
243: .   -dm_mat_type ctype

245:    Level: intermediate

247: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
248: @*/
249: PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
250: {

255:   PetscFree(dm->mattype);
256:   PetscStrallocpy(ctype,(char**)&dm->mattype);
257:   return(0);
258: }

262: /*@C
263:        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()

265:    Logically Collective on DM

267:    Input Parameter:
268: .  dm - the DM context

270:    Output Parameter:
271: .  ctype - the matrix type

273:    Options Database:
274: .   -dm_mat_type ctype

276:    Level: intermediate

278: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
279: @*/
280: PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
281: {
284:   *ctype = dm->mattype;
285:   return(0);
286: }

290: /*@
291:   MatGetDM - Gets the DM defining the data layout of the matrix

293:   Not collective

295:   Input Parameter:
296: . A - The Mat

298:   Output Parameter:
299: . dm - The DM

301:   Level: intermediate

303: .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
304: @*/
305: PetscErrorCode MatGetDM(Mat A, DM *dm)
306: {

312:   PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);
313:   return(0);
314: }

318: /*@
319:   MatSetDM - Sets the DM defining the data layout of the matrix

321:   Not collective

323:   Input Parameters:
324: + A - The Mat
325: - dm - The DM

327:   Level: intermediate

329: .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
330: @*/
331: PetscErrorCode MatSetDM(Mat A, DM dm)
332: {

338:   PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);
339:   return(0);
340: }

344: /*@C
345:    DMSetOptionsPrefix - Sets the prefix used for searching for all
346:    DMDA options in the database.

348:    Logically Collective on DMDA

350:    Input Parameters:
351: +  da - the DMDA context
352: -  prefix - the prefix to prepend to all option names

354:    Notes:
355:    A hyphen (-) must NOT be given at the beginning of the prefix name.
356:    The first character of all runtime options is AUTOMATICALLY the hyphen.

358:    Level: advanced

360: .keywords: DMDA, set, options, prefix, database

362: .seealso: DMSetFromOptions()
363: @*/
364: PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
365: {

370:   PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);
371:   return(0);
372: }
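
/* Editor's note: sketch of DMSetOptionsPrefix(), not part of dm.c.  With the prefix "sub_",
   this DM is configured by options such as -sub_dm_vec_type and -sub_dm_mat_type. */
#include <petscdm.h>
static PetscErrorCode ConfigurePrefixedDM(DM dm)
{
  PetscErrorCode ierr;

  ierr = DMSetOptionsPrefix(dm,"sub_");CHKERRQ(ierr); /* note: no leading hyphen */
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  return 0;
}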

376: /*@
377:     DMDestroy - Destroys a vector packer or DMDA.

379:     Collective on DM

381:     Input Parameter:
382: .   dm - the DM object to destroy

384:     Level: developer

386: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

388: @*/
389: PetscErrorCode  DMDestroy(DM *dm)
390: {
391:   PetscInt       i, cnt = 0, f;
392:   DMNamedVecLink nlink,nnext;

396:   if (!*dm) return(0);

399:   /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
400:   for (f = 0; f < (*dm)->numFields; ++f) {
401:     PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);
402:     PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);
403:     PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);
404:   }
405:   /* count all the circular references of DM and its contained Vecs */
406:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
407:     if ((*dm)->localin[i])  cnt++;
408:     if ((*dm)->globalin[i]) cnt++;
409:   }
410:   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
411:   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
412:   if ((*dm)->x) {
413:     DM obj;
414:     VecGetDM((*dm)->x, &obj);
415:     if (obj == *dm) cnt++;
416:   }

418:   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; return(0);}
419:   /*
420:      Need this test because the dm references the vectors that
421:      reference the dm, so destroying the dm calls destroy on the
422:      vectors that cause another destroy on the dm
423:   */
424:   if (((PetscObject)(*dm))->refct < 0) return(0);
425:   ((PetscObject) (*dm))->refct = 0;
426:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
427:     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
428:     VecDestroy(&(*dm)->localin[i]);
429:   }
430:   nnext=(*dm)->namedglobal;
431:   (*dm)->namedglobal = NULL;
432:   for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
433:     nnext = nlink->next;
434:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
435:     PetscFree(nlink->name);
436:     VecDestroy(&nlink->X);
437:     PetscFree(nlink);
438:   }
439:   nnext=(*dm)->namedlocal;
440:   (*dm)->namedlocal = NULL;
441:   for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
442:     nnext = nlink->next;
443:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
444:     PetscFree(nlink->name);
445:     VecDestroy(&nlink->X);
446:     PetscFree(nlink);
447:   }

449:   /* Destroy the list of hooks */
450:   {
451:     DMCoarsenHookLink link,next;
452:     for (link=(*dm)->coarsenhook; link; link=next) {
453:       next = link->next;
454:       PetscFree(link);
455:     }
456:     (*dm)->coarsenhook = NULL;
457:   }
458:   {
459:     DMRefineHookLink link,next;
460:     for (link=(*dm)->refinehook; link; link=next) {
461:       next = link->next;
462:       PetscFree(link);
463:     }
464:     (*dm)->refinehook = NULL;
465:   }
466:   {
467:     DMSubDomainHookLink link,next;
468:     for (link=(*dm)->subdomainhook; link; link=next) {
469:       next = link->next;
470:       PetscFree(link);
471:     }
472:     (*dm)->subdomainhook = NULL;
473:   }
474:   {
475:     DMGlobalToLocalHookLink link,next;
476:     for (link=(*dm)->gtolhook; link; link=next) {
477:       next = link->next;
478:       PetscFree(link);
479:     }
480:     (*dm)->gtolhook = NULL;
481:   }
482:   /* Destroy the work arrays */
483:   {
484:     DMWorkLink link,next;
485:     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
486:     for (link=(*dm)->workin; link; link=next) {
487:       next = link->next;
488:       PetscFree(link->mem);
489:       PetscFree(link);
490:     }
491:     (*dm)->workin = NULL;
492:   }

494:   PetscObjectDestroy(&(*dm)->dmksp);
495:   PetscObjectDestroy(&(*dm)->dmsnes);
496:   PetscObjectDestroy(&(*dm)->dmts);

498:   if ((*dm)->ctx && (*dm)->ctxdestroy) {
499:     (*(*dm)->ctxdestroy)(&(*dm)->ctx);
500:   }
501:   VecDestroy(&(*dm)->x);
502:   MatFDColoringDestroy(&(*dm)->fd);
503:   DMClearGlobalVectors(*dm);
504:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);
505:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);
506:   PetscFree((*dm)->vectype);
507:   PetscFree((*dm)->mattype);

509:   PetscSectionDestroy(&(*dm)->defaultSection);
510:   PetscSectionDestroy(&(*dm)->defaultGlobalSection);
511:   PetscLayoutDestroy(&(*dm)->map);
512:   PetscSFDestroy(&(*dm)->sf);
513:   PetscSFDestroy(&(*dm)->defaultSF);

515:   DMDestroy(&(*dm)->coordinateDM);
516:   VecDestroy(&(*dm)->coordinates);
517:   VecDestroy(&(*dm)->coordinatesLocal);
518:   PetscFree((*dm)->maxCell);
519:   PetscFree((*dm)->L);

521:   for (f = 0; f < (*dm)->numFields; ++f) {
522:     PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);
523:   }
524:   PetscFree((*dm)->fields);
525:   DMDestroy(&(*dm)->dmBC);
526:   /* if memory was published with SAWs then destroy it */
527:   PetscObjectSAWsViewOff((PetscObject)*dm);

529:   (*(*dm)->ops->destroy)(*dm);
530:   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
531:   PetscHeaderDestroy(dm);
532:   return(0);
533: }

537: /*@
538:     DMSetUp - sets up the data structures inside a DM object

540:     Collective on DM

542:     Input Parameter:
543: .   dm - the DM object to setup

545:     Level: developer

547: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

549: @*/
550: PetscErrorCode  DMSetUp(DM dm)
551: {

556:   if (dm->setupcalled) return(0);
557:   if (dm->ops->setup) {
558:     (*dm->ops->setup)(dm);
559:   }
560:   dm->setupcalled = PETSC_TRUE;
561:   return(0);
562: }

566: /*@
567:     DMSetFromOptions - sets parameters in a DM from the options database

569:     Collective on DM

571:     Input Parameter:
572: .   dm - the DM object to set options for

574:     Options Database:
575: +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
576: .   -dm_vec_type <type>  type of vector to create inside DM
577: .   -dm_mat_type <type>  type of matrix to create inside DM
578: -   -dm_coloring_type <global or ghosted>

580:     Level: developer

582: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

584: @*/
585: PetscErrorCode  DMSetFromOptions(DM dm)
586: {
587:   char           typeName[256];
588:   PetscBool      flg;

593:   PetscObjectOptionsBegin((PetscObject)dm);
594:   PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);
595:   PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);
596:   if (flg) {
597:     DMSetVecType(dm,typeName);
598:   }
599:   PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);
600:   if (flg) {
601:     DMSetMatType(dm,typeName);
602:   }
603:   PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);
604:   if (dm->ops->setfromoptions) {
605:     (*dm->ops->setfromoptions)(dm);
606:   }
607:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
608:   PetscObjectProcessOptionsHandlers((PetscObject) dm);
609:   PetscOptionsEnd();
610:   return(0);
611: }

615: /*@C
616:     DMView - Views a vector packer or DMDA.

618:     Collective on DM

620:     Input Parameters:
621: +   dm - the DM object to view
622: -   v - the viewer

624:     Level: developer

626: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

628: @*/
629: PetscErrorCode  DMView(DM dm,PetscViewer v)
630: {
632:   PetscBool      isbinary;

636:   if (!v) {
637:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);
638:   }
639:   PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);
640:   PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);
641:   if (isbinary) {
642:     PetscInt classid = DM_FILE_CLASSID;
643:     char     type[256];

645:     PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);
646:     PetscStrncpy(type,((PetscObject)dm)->type_name,256);
647:     PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);
648:   }
649:   if (dm->ops->view) {
650:     (*dm->ops->view)(dm,v);
651:   }
652:   return(0);
653: }

657: /*@
658:     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

660:     Collective on DM

662:     Input Parameter:
663: .   dm - the DM object

665:     Output Parameter:
666: .   vec - the global vector

668:     Level: beginner

670: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

672: @*/
673: PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
674: {

679:   (*dm->ops->createglobalvector)(dm,vec);
680:   return(0);
681: }

685: /*@
686:     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

688:     Not Collective

690:     Input Parameter:
691: .   dm - the DM object

693:     Output Parameter:
694: .   vec - the local vector

696:     Level: beginner

698: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

700: @*/
701: PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
702: {

707:   (*dm->ops->createlocalvector)(dm,vec);
708:   return(0);
709: }
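
/* Editor's note: sketch pairing DMCreateGlobalVector() and DMCreateLocalVector(), not part of dm.c. */
#include <petscdm.h>
static PetscErrorCode CreateVectors(DM dm,Vec *g,Vec *l)
{
  PetscErrorCode ierr;

  ierr = DMCreateGlobalVector(dm,g);CHKERRQ(ierr); /* parallel layout, no ghost points */
  ierr = DMCreateLocalVector(dm,l);CHKERRQ(ierr);  /* sequential, includes ghost points */
  /* ... fill g, scatter with DMGlobalToLocalBegin/End(), eventually VecDestroy() both ... */
  return 0;
}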

713: /*@
714:    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

716:    Collective on DM

718:    Input Parameter:
719: .  dm - the DM that provides the mapping

721:    Output Parameter:
722: .  ltog - the mapping

724:    Level: intermediate

726:    Notes:
727:    This mapping can then be used by VecSetLocalToGlobalMapping() or
728:    MatSetLocalToGlobalMapping().

730: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
731: @*/
732: PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
733: {

739:   if (!dm->ltogmap) {
740:     PetscSection section, sectionGlobal;

742:     DMGetDefaultSection(dm, &section);
743:     if (section) {
744:       PetscInt *ltog;
745:       PetscInt pStart, pEnd, size, p, l;

747:       DMGetDefaultGlobalSection(dm, &sectionGlobal);
748:       PetscSectionGetChart(section, &pStart, &pEnd);
749:       PetscSectionGetStorageSize(section, &size);
750:       PetscMalloc1(size, &ltog); /* We want the local+overlap size */
751:       for (p = pStart, l = 0; p < pEnd; ++p) {
752:         PetscInt dof, off, c;

754:         /* Should probably use constrained dofs */
755:         PetscSectionGetDof(section, p, &dof);
756:         PetscSectionGetOffset(sectionGlobal, p, &off);
757:         for (c = 0; c < dof; ++c, ++l) {
758:           ltog[l] = off+c;
759:         }
760:       }
761:       ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);
762:       PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);
763:     } else {
764:       if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
765:       (*dm->ops->getlocaltoglobalmapping)(dm);
766:     }
767:   }
768:   *ltog = dm->ltogmap;
769:   return(0);
770: }
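
/* Editor's note: sketch of using the mapping from DMGetLocalToGlobalMapping(), not part of dm.c.
   The local index 0 and the value 1.0 are purely illustrative. */
#include <petscdm.h>
static PetscErrorCode SetFirstLocalEntry(DM dm,Vec g)
{
  ISLocalToGlobalMapping ltog;
  PetscInt               idx = 0;
  PetscScalar            val = 1.0;
  PetscErrorCode         ierr;

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(g,ltog);CHKERRQ(ierr);
  ierr = VecSetValuesLocal(g,1,&idx,&val,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(g);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(g);CHKERRQ(ierr);
  return 0;
}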

774: /*@
775:    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

777:    Collective on DM

779:    Input Parameter:
780: .  da - the distributed array that provides the mapping

782:    Output Parameter:
783: .  ltog - the block mapping

785:    Level: intermediate

787:    Notes:
788:    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
789:    MatSetLocalToGlobalMappingBlock().

791: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
792: @*/
793: PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
794: {

800:   if (!dm->ltogmapb) {
801:     PetscInt bs;
802:     DMGetBlockSize(dm,&bs);
803:     if (bs > 1) {
804:       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
805:       (*dm->ops->getlocaltoglobalmappingblock)(dm);
806:     } else {
807:       DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);
808:       PetscObjectReference((PetscObject)dm->ltogmapb);
809:     }
810:   }
811:   *ltog = dm->ltogmapb;
812:   return(0);
813: }

817: /*@
818:    DMGetBlockSize - Gets the inherent block size associated with a DM

820:    Not Collective

822:    Input Parameter:
823: .  dm - the DM with block structure

825:    Output Parameter:
826: .  bs - the block size, 1 implies no exploitable block structure

828:    Level: intermediate

830: .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
831: @*/
832: PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
833: {
837:   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
838:   *bs = dm->bs;
839:   return(0);
840: }

844: /*@
845:     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

847:     Collective on DM

849:     Input Parameters:
850: +   dm1 - the DM object
851: -   dm2 - the second, finer DM object

853:     Output Parameters:
854: +  mat - the interpolation
855: -  vec - the scaling (optional)

857:     Level: developer

859:     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
860:         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

862:         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
863:         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


866: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

868: @*/
869: PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
870: {

876:   (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);
877:   return(0);
878: }
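
/* Editor's note: sketch of DMRefine() followed by DMCreateInterpolation(), not part of dm.c.
   It assumes dmc supports refinement (e.g. a DMDA); the optional scaling vector is not requested. */
#include <petscdm.h>
static PetscErrorCode InterpolateToFine(DM dmc,Vec xc)
{
  DM             dmf;
  Mat            Interp;
  Vec            xf;
  PetscErrorCode ierr;

  ierr = DMRefine(dmc,MPI_COMM_NULL,&dmf);CHKERRQ(ierr);
  ierr = DMCreateInterpolation(dmc,dmf,&Interp,NULL);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(dmf,&xf);CHKERRQ(ierr);
  ierr = MatInterpolate(Interp,xc,xf);CHKERRQ(ierr);   /* xf = Interp * xc */
  ierr = VecDestroy(&xf);CHKERRQ(ierr);
  ierr = MatDestroy(&Interp);CHKERRQ(ierr);
  ierr = DMDestroy(&dmf);CHKERRQ(ierr);
  return 0;
}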

882: /*@
883:     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

885:     Collective on DM

887:     Input Parameters:
888: +   dm1 - the DM object
889: -   dm2 - the second, finer DM object

891:     Output Parameter:
892: .   ctx - the injection

894:     Level: developer

896:    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
897:         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

899: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

901: @*/
902: PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
903: {

909:   (*dm1->ops->getinjection)(dm1,dm2,ctx);
910:   return(0);
911: }

915: /*@
916:     DMCreateColoring - Gets coloring for a DM

918:     Collective on DM

920:     Input Parameters:
921: +   dm - the DM object
922: -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

924:     Output Parameter:
925: .   coloring - the coloring

927:     Level: developer

929: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()

931: @*/
932: PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
933: {

938:   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
939:   (*dm->ops->getcoloring)(dm,ctype,coloring);
940:   return(0);
941: }

945: /*@
946:     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

948:     Collective on DM

950:     Input Parameter:
951: .   dm - the DM object

953:     Output Parameter:
954: .   mat - the empty Jacobian

956:     Level: beginner

958:     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
959:        do not need to do it yourself.

961:        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
962:        the nonzero pattern call DMSetMatrixPreallocateOnly()

964:        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
965:        internally by PETSc.

967:        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
968:        the indices for the global numbering for DMDAs which is complicated.

970: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()

972: @*/
973: PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
974: {

979:   MatInitializePackage();
982:   (*dm->ops->creatematrix)(dm,mat);
983:   return(0);
984: }
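
/* Editor's note: sketch of DMSetMatType() + DMCreateMatrix(), not part of dm.c.  The matrix comes
   back preallocated; for structured grids it is filled with MatSetValuesStencil()/MatSetValuesLocal(). */
#include <petscdm.h>
static PetscErrorCode CreateJacobian(DM dm,Mat *J)
{
  PetscErrorCode ierr;

  ierr = DMSetMatType(dm,MATAIJ);CHKERRQ(ierr); /* or -dm_mat_type aij at runtime */
  ierr = DMCreateMatrix(dm,J);CHKERRQ(ierr);
  /* ... set values, MatAssemblyBegin/End(), and eventually MatDestroy() ... */
  return 0;
}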

988: /*@
989:   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
990:     preallocated but the nonzero structure and zero values will not be set.

992:   Logically Collective on DMDA

994:   Input Parameters:
995: + dm - the DM
996: - only - PETSC_TRUE if only want preallocation

998:   Level: developer
999: .seealso DMCreateMatrix()
1000: @*/
1001: PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
1002: {
1005:   dm->prealloc_only = only;
1006:   return(0);
1007: }

1011: /*@C
1012:   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

1014:   Not Collective

1016:   Input Parameters:
1017: + dm - the DM object
1018: . count - The minimum size
1019: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

1021:   Output Parameter:
1022: . array - the work array

1024:   Level: developer

1026: .seealso DMDestroy(), DMCreate()
1027: @*/
1028: PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1029: {
1031:   DMWorkLink     link;
1032:   size_t         size;

1037:   if (dm->workin) {
1038:     link       = dm->workin;
1039:     dm->workin = dm->workin->next;
1040:   } else {
1041:     PetscNewLog(dm,&link);
1042:   }
1043:   PetscDataTypeGetSize(dtype,&size);
1044:   if (size*count > link->bytes) {
1045:     PetscFree(link->mem);
1046:     PetscMalloc(size*count,&link->mem);
1047:     link->bytes = size*count;
1048:   }
1049:   link->next   = dm->workout;
1050:   dm->workout  = link;
1051:   *(void**)mem = link->mem;
1052:   return(0);
1053: }

1057: /*@C
1058:   DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

1060:   Not Collective

1062:   Input Parameters:
1063: + dm - the DM object
1064: . count - The minimum size
1065: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

1067:   Output Parameter:
1068: . array - the work array

1070:   Level: developer

1072: .seealso DMDestroy(), DMCreate()
1073: @*/
1074: PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1075: {
1076:   DMWorkLink *p,link;

1081:   for (p=&dm->workout; (link=*p); p=&link->next) {
1082:     if (link->mem == *(void**)mem) {
1083:       *p           = link->next;
1084:       link->next   = dm->workin;
1085:       dm->workin   = link;
1086:       *(void**)mem = NULL;
1087:       return(0);
1088:     }
1089:   }
1090:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1091:   return(0);
1092: }
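
/* Editor's note: sketch of a matched DMGetWorkArray()/DMRestoreWorkArray() pair, not part of dm.c. */
#include <petscdm.h>
static PetscErrorCode UseScratchSpace(DM dm,PetscInt n)
{
  PetscScalar    *work;
  PetscInt       i;
  PetscErrorCode ierr;

  ierr = DMGetWorkArray(dm,n,PETSC_SCALAR,&work);CHKERRQ(ierr);
  for (i = 0; i < n; ++i) work[i] = 0.0;   /* scratch computation */
  ierr = DMRestoreWorkArray(dm,n,PETSC_SCALAR,&work);CHKERRQ(ierr);
  return 0;
}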

1096: PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1097: {
1100:   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1101:   dm->nullspaceConstructors[field] = nullsp;
1102:   return(0);
1103: }

1107: /*@C
1108:   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

1110:   Not collective

1112:   Input Parameter:
1113: . dm - the DM object

1115:   Output Parameters:
1116: + numFields  - The number of fields (or NULL if not requested)
1117: . fieldNames - The name for each field (or NULL if not requested)
1118: - fields     - The global indices for each field (or NULL if not requested)

1120:   Level: intermediate

1122:   Notes:
1123:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1124:   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1125:   PetscFree().

1127: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1128: @*/
1129: PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1130: {
1131:   PetscSection   section, sectionGlobal;

1136:   if (numFields) {
1138:     *numFields = 0;
1139:   }
1140:   if (fieldNames) {
1142:     *fieldNames = NULL;
1143:   }
1144:   if (fields) {
1146:     *fields = NULL;
1147:   }
1148:   DMGetDefaultSection(dm, &section);
1149:   if (section) {
1150:     PetscInt *fieldSizes, **fieldIndices;
1151:     PetscInt nF, f, pStart, pEnd, p;

1153:     DMGetDefaultGlobalSection(dm, &sectionGlobal);
1154:     PetscSectionGetNumFields(section, &nF);
1155:     PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);
1156:     PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);
1157:     for (f = 0; f < nF; ++f) {
1158:       fieldSizes[f] = 0;
1159:     }
1160:     for (p = pStart; p < pEnd; ++p) {
1161:       PetscInt gdof;

1163:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1164:       if (gdof > 0) {
1165:         for (f = 0; f < nF; ++f) {
1166:           PetscInt fdof, fcdof;

1168:           PetscSectionGetFieldDof(section, p, f, &fdof);
1169:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1170:           fieldSizes[f] += fdof-fcdof;
1171:         }
1172:       }
1173:     }
1174:     for (f = 0; f < nF; ++f) {
1175:       PetscMalloc1(fieldSizes[f], &fieldIndices[f]);
1176:       fieldSizes[f] = 0;
1177:     }
1178:     for (p = pStart; p < pEnd; ++p) {
1179:       PetscInt gdof, goff;

1181:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1182:       if (gdof > 0) {
1183:         PetscSectionGetOffset(sectionGlobal, p, &goff);
1184:         for (f = 0; f < nF; ++f) {
1185:           PetscInt fdof, fcdof, fc;

1187:           PetscSectionGetFieldDof(section, p, f, &fdof);
1188:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1189:           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1190:             fieldIndices[f][fieldSizes[f]] = goff++;
1191:           }
1192:         }
1193:       }
1194:     }
1195:     if (numFields) *numFields = nF;
1196:     if (fieldNames) {
1197:       PetscMalloc1(nF, fieldNames);
1198:       for (f = 0; f < nF; ++f) {
1199:         const char *fieldName;

1201:         PetscSectionGetFieldName(section, f, &fieldName);
1202:         PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);
1203:       }
1204:     }
1205:     if (fields) {
1206:       PetscMalloc1(nF, fields);
1207:       for (f = 0; f < nF; ++f) {
1208:         ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);
1209:       }
1210:     }
1211:     PetscFree2(fieldSizes,fieldIndices);
1212:   } else if (dm->ops->createfieldis) {
1213:     (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);
1214:   }
1215:   return(0);
1216: }
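
/* Editor's note: sketch of calling DMCreateFieldIS() and freeing its output as described in the
   Notes above, not part of dm.c. */
#include <petscdm.h>
static PetscErrorCode ListFields(DM dm)
{
  PetscInt       nf,f;
  char           **names;
  IS             *fields;
  PetscErrorCode ierr;

  ierr = DMCreateFieldIS(dm,&nf,&names,&fields);CHKERRQ(ierr);
  for (f = 0; f < nf; ++f) {
    ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"field %D: %s\n",f,names[f]);CHKERRQ(ierr);
    ierr = PetscFree(names[f]);CHKERRQ(ierr);
    ierr = ISDestroy(&fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree(names);CHKERRQ(ierr);
  ierr = PetscFree(fields);CHKERRQ(ierr);
  return 0;
}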


1221: /*@C
1222:   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1223:                           corresponding to different fields: each IS contains the global indices of the dofs of the
1224:                           corresponding field. The optional list of DMs define the DM for each subproblem.
1225:                           Generalizes DMCreateFieldIS().

1227:   Not collective

1229:   Input Parameter:
1230: . dm - the DM object

1232:   Output Parameters:
1233: + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1234: . namelist  - The name for each field (or NULL if not requested)
1235: . islist    - The global indices for each field (or NULL if not requested)
1236: - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

1238:   Level: intermediate

1240:   Notes:
1241:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1242:   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1243:   and all of the arrays should be freed with PetscFree().

1245: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1246: @*/
1247: PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1248: {

1253:   if (len) {
1255:     *len = 0;
1256:   }
1257:   if (namelist) {
1259:     *namelist = 0;
1260:   }
1261:   if (islist) {
1263:     *islist = 0;
1264:   }
1265:   if (dmlist) {
1267:     *dmlist = 0;
1268:   }
1269:   /*
1270:    Is it a good idea to apply the following check across all impls?
1271:    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1272:    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1273:    */
1274:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1275:   if (!dm->ops->createfielddecomposition) {
1276:     PetscSection section;
1277:     PetscInt     numFields, f;

1279:     DMGetDefaultSection(dm, &section);
1280:     if (section) {PetscSectionGetNumFields(section, &numFields);}
1281:     if (section && numFields && dm->ops->createsubdm) {
1282:       *len = numFields;
1283:       if (namelist) {PetscMalloc1(numFields,namelist);}
1284:       if (islist)   {PetscMalloc1(numFields,islist);}
1285:       if (dmlist)   {PetscMalloc1(numFields,dmlist);}
1286:       for (f = 0; f < numFields; ++f) {
1287:         const char *fieldName;

1289:         DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);
1290:         if (namelist) {
1291:           PetscSectionGetFieldName(section, f, &fieldName);
1292:           PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);
1293:         }
1294:       }
1295:     } else {
1296:       DMCreateFieldIS(dm, len, namelist, islist);
1297:       /* By default there are no DMs associated with subproblems. */
1298:       if (dmlist) *dmlist = NULL;
1299:     }
1300:   } else {
1301:     (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);
1302:   }
1303:   return(0);
1304: }

1308: /*@C
1309:   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1310:                   The fields are defined by DMCreateFieldIS().

1312:   Not collective

1314:   Input Parameters:
1315: + dm - the DM object
1316: . numFields - number of fields in this subproblem
1317: - fields    - The field numbers of the selected fields

1319:   Output Parameters:
1320: + is - The global indices for the subproblem
1321: - subdm - The DM for the subproblem

1323:   Level: intermediate

1325: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1326: @*/
1327: PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1328: {

1336:   if (dm->ops->createsubdm) {
1337:     (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);
1338:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1339:   return(0);
1340: }


1345: /*@C
1346:   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1347:                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1348:                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1349:                           define a nonoverlapping covering, while outer subdomains can overlap.
1350:                           The optional list of DMs define the DM for each subproblem.

1352:   Not collective

1354:   Input Parameter:
1355: . dm - the DM object

1357:   Output Parameters:
1358: + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1359: . namelist    - The name for each subdomain (or NULL if not requested)
1360: . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1361: . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1362: - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

1364:   Level: intermediate

1366:   Notes:
1367:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1368:   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1369:   and all of the arrays should be freed with PetscFree().

1371: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1372: @*/
1373: PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1374: {
1375:   PetscErrorCode      ierr;
1376:   DMSubDomainHookLink link;
1377:   PetscInt            i,l;

1386:   /*
1387:    Is it a good idea to apply the following check across all impls?
1388:    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1389:    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1390:    */
1391:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1392:   if (dm->ops->createdomaindecomposition) {
1393:     (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);
1394:     /* copy subdomain hooks and context over to the subdomain DMs */
1395:     if (dmlist) {
1396:       for (i = 0; i < l; i++) {
1397:         for (link=dm->subdomainhook; link; link=link->next) {
1398:           if (link->ddhook) {(*link->ddhook)(dm,(*dmlist)[i],link->ctx);}
1399:         }
1400:         (*dmlist)[i]->ctx = dm->ctx;
1401:       }
1402:     }
1403:     if (len) *len = l;
1404:   }
1405:   return(0);
1406: }


1411: /*@C
1412:   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

1414:   Not collective

1416:   Input Parameters:
1417: + dm - the DM object
1418: . n  - the number of subdomain scatters
1419: - subdms - the local subdomains

1421:   Output Parameters:
1422: + n     - the number of scatters returned
1423: . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1424: . oscat - scatter from global vector to overlapping global vector entries on subdomain
1425: - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

1427:   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1428:   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1429:   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1430:   solution and residual data.

1432:   Level: developer

1434: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1435: @*/
1436: PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1437: {

1443:   if (dm->ops->createddscatters) {
1444:     (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);
1445:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1446:   return(0);
1447: }

1451: /*@
1452:   DMRefine - Refines a DM object

1454:   Collective on DM

1456:   Input Parameters:
1457: + dm   - the DM object
1458: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

1460:   Output Parameter:
1461: . dmf - the refined DM, or NULL

1463:   Note: If no refinement was done, the return value is NULL

1465:   Level: developer

1467: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1468: @*/
1469: PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1470: {
1471:   PetscErrorCode   ierr;
1472:   DMRefineHookLink link;

1476:   (*dm->ops->refine)(dm,comm,dmf);
1477:   if (*dmf) {
1478:     (*dmf)->ops->creatematrix = dm->ops->creatematrix;

1480:     PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);

1482:     (*dmf)->ctx       = dm->ctx;
1483:     (*dmf)->leveldown = dm->leveldown;
1484:     (*dmf)->levelup   = dm->levelup + 1;

1486:     DMSetMatType(*dmf,dm->mattype);
1487:     for (link=dm->refinehook; link; link=link->next) {
1488:       if (link->refinehook) {
1489:         (*link->refinehook)(dm,*dmf,link->ctx);
1490:       }
1491:     }
1492:   }
1493:   return(0);
1494: }

1498: /*@C
1499:    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

1501:    Logically Collective

1503:    Input Arguments:
1504: +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1505: .  refinehook - function to run when setting up a coarser level
1506: .  interphook - function to run to update data on finer levels (once per SNESSolve())
1507: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

1509:    Calling sequence of refinehook:
1510: $    refinehook(DM coarse,DM fine,void *ctx);

1512: +  coarse - coarse level DM
1513: .  fine - fine level DM to interpolate problem to
1514: -  ctx - optional user-defined function context

1516:    Calling sequence for interphook:
1517: $    interphook(DM coarse,Mat interp,DM fine,void *ctx)

1519: +  coarse - coarse level DM
1520: .  interp - matrix interpolating a coarse-level solution to the finer grid
1521: .  fine - fine level DM to update
1522: -  ctx - optional user-defined function context

1524:    Level: advanced

1526:    Notes:
1527:    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

1529:    If this function is called multiple times, the hooks will be run in the order they are added.

1531:    This function is currently not available from Fortran.

1533: .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1534: @*/
1535: PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1536: {
1537:   PetscErrorCode   ierr;
1538:   DMRefineHookLink link,*p;

1542:   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1543:   PetscMalloc(sizeof(struct _DMRefineHookLink),&link);
1544:   link->refinehook = refinehook;
1545:   link->interphook = interphook;
1546:   link->ctx        = ctx;
1547:   link->next       = NULL;
1548:   *p               = link;
1549:   return(0);
1550: }
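
/* Editor's note: sketch of registering hooks with DMRefineHookAdd(), not part of dm.c.  The hook
   names and the AppCtx struct are illustrative only. */
#include <petscdm.h>
typedef struct {PetscReal param;} AppCtx;

static PetscErrorCode MyRefineHook(DM coarse,DM fine,void *ctx)
{
  /* e.g. attach auxiliary data to the newly created fine DM */
  return 0;
}

static PetscErrorCode MyInterpHook(DM coarse,Mat interp,DM fine,void *ctx)
{
  /* e.g. interpolate auxiliary vectors from coarse to fine using interp */
  return 0;
}

static PetscErrorCode RegisterRefineHooks(DM dm,AppCtx *user)
{
  PetscErrorCode ierr;

  ierr = DMRefineHookAdd(dm,MyRefineHook,MyInterpHook,user);CHKERRQ(ierr);
  return 0;
}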

1554: /*@
1555:    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

1557:    Collective if any hooks are

1559:    Input Arguments:
1560: +  coarse - coarser DM to use as a base
1561: .  interp - interpolation matrix, apply using MatInterpolate()
1562: -  fine - finer DM to update

1564:    Level: developer

1566: .seealso: DMRefineHookAdd(), MatInterpolate()
1567: @*/
1568: PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1569: {
1570:   PetscErrorCode   ierr;
1571:   DMRefineHookLink link;

1574:   for (link=fine->refinehook; link; link=link->next) {
1575:     if (link->interphook) {
1576:       (*link->interphook)(coarse,interp,fine,link->ctx);
1577:     }
1578:   }
1579:   return(0);
1580: }

1584: /*@
1585:     DMGetRefineLevel - Gets the number of refinements that have generated this DM.

1587:     Not Collective

1589:     Input Parameter:
1590: .   dm - the DM object

1592:     Output Parameter:
1593: .   level - number of refinements

1595:     Level: developer

1597: .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

1599: @*/
1600: PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1601: {
1604:   *level = dm->levelup;
1605:   return(0);
1606: }

1610: /*@C
1611:    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

1613:    Logically Collective

1615:    Input Arguments:
1616: +  dm - the DM
1617: .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1618: .  endhook - function to run after DMGlobalToLocalEnd() has completed
1619: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

1621:    Calling sequence for beginhook:
1622: $    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

1624: +  dm - global DM
1625: .  g - global vector
1626: .  mode - mode
1627: .  l - local vector
1628: -  ctx - optional user-defined function context


1631:    Calling sequence for endhook:
1632: $    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

1634: +  dm - global DM
1635: -  ctx - optional user-defined function context

1637:    Level: advanced

1639: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1640: @*/
1641: PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1642: {
1643:   PetscErrorCode          ierr;
1644:   DMGlobalToLocalHookLink link,*p;

1648:   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1649:   PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);
1650:   link->beginhook = beginhook;
1651:   link->endhook   = endhook;
1652:   link->ctx       = ctx;
1653:   link->next      = NULL;
1654:   *p              = link;
1655:   return(0);
1656: }

1660: /*@
1661:     DMGlobalToLocalBegin - Begins updating local vectors from global vector

1663:     Neighbor-wise Collective on DM

1665:     Input Parameters:
1666: +   dm - the DM object
1667: .   g - the global vector
1668: .   mode - INSERT_VALUES or ADD_VALUES
1669: -   l - the local vector


1672:     Level: beginner

1674: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1676: @*/
1677: PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1678: {
1679:   PetscSF                 sf;
1680:   PetscErrorCode          ierr;
1681:   DMGlobalToLocalHookLink link;

1685:   for (link=dm->gtolhook; link; link=link->next) {
1686:     if (link->beginhook) {
1687:       (*link->beginhook)(dm,g,mode,l,link->ctx);
1688:     }
1689:   }
1690:   DMGetDefaultSF(dm, &sf);
1691:   if (sf) {
1692:     PetscScalar *lArray, *gArray;

1694:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1695:     VecGetArray(l, &lArray);
1696:     VecGetArray(g, &gArray);
1697:     PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);
1698:     VecRestoreArray(l, &lArray);
1699:     VecRestoreArray(g, &gArray);
1700:   } else {
1701:     (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1702:   }
1703:   return(0);
1704: }

1708: /*@
1709:     DMGlobalToLocalEnd - Ends updating local vectors from global vector

1711:     Neighbor-wise Collective on DM

1713:     Input Parameters:
1714: +   dm - the DM object
1715: .   g - the global vector
1716: .   mode - INSERT_VALUES or ADD_VALUES
1717: -   l - the local vector


1720:     Level: beginner

1722: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1724: @*/
1725: PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1726: {
1727:   PetscSF                 sf;
1728:   PetscErrorCode          ierr;
1729:   PetscScalar             *lArray, *gArray;
1730:   DMGlobalToLocalHookLink link;

1734:   DMGetDefaultSF(dm, &sf);
1735:   if (sf) {
1736:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

1738:     VecGetArray(l, &lArray);
1739:     VecGetArray(g, &gArray);
1740:     PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);
1741:     VecRestoreArray(l, &lArray);
1742:     VecRestoreArray(g, &gArray);
1743:   } else {
1744:     (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1745:   }
1746:   for (link=dm->gtolhook; link; link=link->next) {
1747:     if (link->endhook) {(*link->endhook)(dm,g,mode,l,link->ctx);}
1748:   }
1749:   return(0);
1750: }
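
/* Editor's note: sketch of a DMGlobalToLocalBegin()/DMGlobalToLocalEnd() pair, not part of dm.c. */
#include <petscdm.h>
static PetscErrorCode UpdateGhosts(DM dm,Vec g)
{
  Vec            l;
  PetscErrorCode ierr;

  ierr = DMGetLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  /* ... ghost points of l are now valid ... */
  ierr = DMRestoreLocalVector(dm,&l);CHKERRQ(ierr);
  return 0;
}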

1754: /*@
1755:     DMLocalToGlobalBegin - updates global vectors from local vectors

1757:     Neighbor-wise Collective on DM

1759:     Input Parameters:
1760: +   dm - the DM object
1761: .   l - the local vector
1762: .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1763:            base point.
1764: -   g - the global vector

1766:     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1767:            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1768:            global array to the final global array with VecAXPY().

1770:     Level: beginner

1772: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

1774: @*/
1775: PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1776: {
1777:   PetscSF        sf;

1782:   DMGetDefaultSF(dm, &sf);
1783:   if (sf) {
1784:     MPI_Op      op;
1785:     PetscScalar *lArray, *gArray;

1787:     switch (mode) {
1788:     case INSERT_VALUES:
1789:     case INSERT_ALL_VALUES:
1790:       op = MPIU_REPLACE; break;
1791:     case ADD_VALUES:
1792:     case ADD_ALL_VALUES:
1793:       op = MPI_SUM; break;
1794:     default:
1795:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1796:     }
1797:     VecGetArray(l, &lArray);
1798:     VecGetArray(g, &gArray);
1799:     PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);
1800:     VecRestoreArray(l, &lArray);
1801:     VecRestoreArray(g, &gArray);
1802:   } else {
1803:     (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1804:   }
1805:   return(0);
1806: }

1810: /*@
1811:     DMLocalToGlobalEnd - updates global vectors from local vectors

1813:     Neighbor-wise Collective on DM

1815:     Input Parameters:
1816: +   dm - the DM object
1817: .   l - the local vector
1818: .   mode - INSERT_VALUES or ADD_VALUES
1819: -   g - the global vector


1822:     Level: beginner

1824: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

1826: @*/
1827: PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1828: {
1829:   PetscSF        sf;

1834:   DMGetDefaultSF(dm, &sf);
1835:   if (sf) {
1836:     MPI_Op      op;
1837:     PetscScalar *lArray, *gArray;

1839:     switch (mode) {
1840:     case INSERT_VALUES:
1841:     case INSERT_ALL_VALUES:
1842:       op = MPIU_REPLACE; break;
1843:     case ADD_VALUES:
1844:     case ADD_ALL_VALUES:
1845:       op = MPI_SUM; break;
1846:     default:
1847:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1848:     }
1849:     VecGetArray(l, &lArray);
1850:     VecGetArray(g, &gArray);
1851:     PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);
1852:     VecRestoreArray(l, &lArray);
1853:     VecRestoreArray(g, &gArray);
1854:   } else {
1855:     (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1856:   }
1857:   return(0);
1858: }
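
/* Editor's note: sketch of DMLocalToGlobalBegin()/End() with ADD_VALUES, not part of dm.c.
   As the Notes for DMLocalToGlobalBegin() say, the receiving global vector is normally zeroed first. */
#include <petscdm.h>
static PetscErrorCode AccumulateLocal(DM dm,Vec l,Vec g)
{
  PetscErrorCode ierr;

  ierr = VecSet(g,0.0);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
  return 0;
}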

1862: /*@
1863:    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1864:    that contain irrelevant values) to another local vector where the ghost
1865:    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().

1867:    Neighbor-wise Collective on DM and Vec

1869:    Input Parameters:
1870: +  dm - the DM object
1871: .  g - the original local vector
1872: -  mode - one of INSERT_VALUES or ADD_VALUES

1874:    Output Parameter:
1875: .  l  - the local vector with correct ghost values

1877:    Level: intermediate

1879:    Notes:
1880:    The local vectors used here need not be the same as those
1881:    obtained from DMCreateLocalVector(), BUT they
1882:    must have the same parallel data layout; they could, for example, be
1883:    obtained with VecDuplicate() from the DM originating vectors.

1885: .keywords: DM, local-to-local, begin
1886: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1888: @*/
1889: PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1890: {
1891:   PetscErrorCode          ierr;

1895:   (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1896:   return(0);
1897: }

1901: /*@
1902:    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1903:    that contain irrelevant values) to another local vector where the ghost
1904:    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

1906:    Neighbor-wise Collective on DM and Vec

1908:    Input Parameters:
1909: +  dm - the DM object
1910: .  g - the original local vector
1911: -  mode - one of INSERT_VALUES or ADD_VALUES

1913:    Output Parameter:
1914: .  l  - the local vector with correct ghost values

1916:    Level: intermediate

1918:    Notes:
1919:    The local vectors used here need not be the same as those
1920:    obtained from DMCreateLocalVector(), BUT they
1921:    must have the same parallel data layout; they could, for example, be
1922:    obtained with VecDuplicate() from the DM originating vectors.

1924: .keywords: DM, local-to-local, end
1925: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1927: @*/
1928: PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1929: {
1930:   PetscErrorCode          ierr;

1934:   (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1935:   return(0);
1936: }
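
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It refreshes ghost values between two local vectors with the same
   parallel layout via DMLocalToLocalBegin()/DMLocalToLocalEnd() (only available
   for DM implementations that provide a local-to-local map, e.g. DMDA). The
   helper name is hypothetical.
*/
static PetscErrorCode ExampleLocalToLocal_Sketch(DM dm,Vec lsrc)
{
  Vec ldst;

  VecDuplicate(lsrc,&ldst);                        /* same layout as the source local vector */
  DMLocalToLocalBegin(dm,lsrc,INSERT_VALUES,ldst);
  DMLocalToLocalEnd(dm,lsrc,INSERT_VALUES,ldst);
  VecDestroy(&ldst);
  return(0);
}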


1941: /*@
1942:     DMCoarsen - Coarsens a DM object

1944:     Collective on DM

1946:     Input Parameters:
1947: +   dm - the DM object
1948: -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

1950:     Output Parameter:
1951: .   dmc - the coarsened DM

1953:     Level: developer

1955: .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

1957: @*/
1958: PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1959: {
1960:   PetscErrorCode    ierr;
1961:   DMCoarsenHookLink link;

1965:   (*dm->ops->coarsen)(dm, comm, dmc);
1966:   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1967:   PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);
1968:   (*dmc)->ctx               = dm->ctx;
1969:   (*dmc)->levelup           = dm->levelup;
1970:   (*dmc)->leveldown         = dm->leveldown + 1;
1971:   DMSetMatType(*dmc,dm->mattype);
1972:   for (link=dm->coarsenhook; link; link=link->next) {
1973:     if (link->coarsenhook) {(*link->coarsenhook)(dm,*dmc,link->ctx);}
1974:   }
1975:   return(0);
1976: }
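
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It coarsens a DM once (MPI_COMM_NULL keeps the communicator of the
   input DM) and creates a global vector on the coarse level. The helper name is
   hypothetical.
*/
static PetscErrorCode ExampleCoarsenOnce_Sketch(DM dm,Vec *xc)
{
  DM dmc;

  DMCoarsen(dm,MPI_COMM_NULL,&dmc);
  DMCreateGlobalVector(dmc,xc);
  DMDestroy(&dmc);
  return(0);
}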

1980: /*@C
1981:    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

1983:    Logically Collective

1985:    Input Arguments:
1986: +  fine - DM on which to run a hook when restricting to a coarser level
1987: .  coarsenhook - function to run when setting up a coarser level
1988: .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1989: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

1991:    Calling sequence of coarsenhook:
1992: $    coarsenhook(DM fine,DM coarse,void *ctx);

1994: +  fine - fine level DM
1995: .  coarse - coarse level DM to restrict problem to
1996: -  ctx - optional user-defined function context

1998:    Calling sequence for restricthook:
1999: $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

2001: +  fine - fine level DM
2002: .  mrestrict - matrix restricting a fine-level solution to the coarse grid
2003: .  rscale - scaling vector for restriction
2004: .  inject - matrix restricting by injection
2005: .  coarse - coarse level DM to update
2006: -  ctx - optional user-defined function context

2008:    Level: advanced

2010:    Notes:
2011:    This function is only needed if auxiliary data needs to be set up on coarse grids.

2013:    If this function is called multiple times, the hooks will be run in the order they are added.

2015:    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2016:    extract the finest level information from its context (instead of from the SNES).

2018:    This function is currently not available from Fortran.

2020: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2021: @*/
2022: PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2023: {
2024:   PetscErrorCode    ierr;
2025:   DMCoarsenHookLink link,*p;

2029:   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2030:   PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);
2031:   link->coarsenhook  = coarsenhook;
2032:   link->restricthook = restricthook;
2033:   link->ctx          = ctx;
2034:   link->next         = NULL;
2035:   *p                 = link;
2036:   return(0);
2037: }
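
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It registers a pair of coarsen/restrict hooks with DMCoarsenHookAdd();
   the two callbacks, the helper, and the user context pointer are hypothetical
   placeholders whose bodies would set up and update auxiliary coarse-level data.
*/
static PetscErrorCode MyCoarsenHook_Sketch(DM fine,DM coarse,void *ctx)
{
  /* set up auxiliary data on the newly created coarse DM */
  return(0);
}

static PetscErrorCode MyRestrictHook_Sketch(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
{
  /* move auxiliary data from the fine level to the coarse level */
  return(0);
}

static PetscErrorCode ExampleAddCoarsenHook_Sketch(DM dm,void *userctx)
{
  DMCoarsenHookAdd(dm,MyCoarsenHook_Sketch,MyRestrictHook_Sketch,userctx);
  return(0);
}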

2041: /*@
2042:    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

2044:    Collective if any hooks are

2046:    Input Arguments:
2047: +  fine - finer DM to use as a base
2048: .  restrct - restriction matrix, apply using MatRestrict()
2049: .  inject - injection matrix, also applied using MatRestrict()
2050: -  coarse - coarser DM to update

2052:    Level: developer

2054: .seealso: DMCoarsenHookAdd(), MatRestrict()
2055: @*/
2056: PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2057: {
2058:   PetscErrorCode    ierr;
2059:   DMCoarsenHookLink link;

2062:   for (link=fine->coarsenhook; link; link=link->next) {
2063:     if (link->restricthook) {
2064:       (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);
2065:     }
2066:   }
2067:   return(0);
2068: }

2072: /*@C
2073:    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain DMs

2075:    Logically Collective

2077:    Input Arguments:
2078: +  global - global DM
2079: .  ddhook - function to run to pass data to the decomposition DM upon its creation
2080: .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2081: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)


2084:    Calling sequence for ddhook:
2085: $    ddhook(DM global,DM block,void *ctx)

2087: +  global - global DM
2088: .  block  - block DM
2089: -  ctx - optional user-defined function context

2091:    Calling sequence for restricthook:
2092: $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

2094: +  global - global DM
2095: .  out    - scatter to the outer (with ghost and overlap points) block vector
2096: .  in     - scatter to block vector values only owned locally
2097: .  block  - block DM
2098: -  ctx - optional user-defined function context

2100:    Level: advanced

2102:    Notes:
2103:    This function is only needed if auxiliary data needs to be set up on subdomain DMs.

2105:    If this function is called multiple times, the hooks will be run in the order they are added.

2107:    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2108:    extract the global information from its context (instead of from the SNES).

2110:    This function is currently not available from Fortran.

2112: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2113: @*/
2114: PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2115: {
2116:   PetscErrorCode      ierr;
2117:   DMSubDomainHookLink link,*p;

2121:   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2122:   PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);
2123:   link->restricthook = restricthook;
2124:   link->ddhook       = ddhook;
2125:   link->ctx          = ctx;
2126:   link->next         = NULL;
2127:   *p                 = link;
2128:   return(0);
2129: }

2133: /*@
2134:    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

2136:    Collective if any hooks are

2138:    Input Arguments:
2139: +  global - global DM to use as a base
2140: .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2141: .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2142: -  subdm - subdomain DM to update

2144:    Level: developer

2146: .seealso: DMCoarsenHookAdd(), MatRestrict()
2147: @*/
2148: PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2149: {
2150:   PetscErrorCode      ierr;
2151:   DMSubDomainHookLink link;

2154:   for (link=global->subdomainhook; link; link=link->next) {
2155:     if (link->restricthook) {
2156:       (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);
2157:     }
2158:   }
2159:   return(0);
2160: }

2164: /*@
2165:     DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

2167:     Not Collective

2169:     Input Parameter:
2170: .   dm - the DM object

2172:     Output Parameter:
2173: .   level - number of coarsenings

2175:     Level: developer

2177: .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2179: @*/
2180: PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2181: {
2184:   *level = dm->leveldown;
2185:   return(0);
2186: }



2192: /*@C
2193:     DMRefineHierarchy - Refines a DM object, all levels at once

2195:     Collective on DM

2197:     Input Parameters:
2198: +   dm - the DM object
2199: -   nlevels - the number of levels of refinement

2201:     Output Parameter:
2202: .   dmf - the refined DM hierarchy

2204:     Level: developer

2206: .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2208: @*/
2209: PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2210: {

2215:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2216:   if (nlevels == 0) return(0);
2217:   if (dm->ops->refinehierarchy) {
2218:     (*dm->ops->refinehierarchy)(dm,nlevels,dmf);
2219:   } else if (dm->ops->refine) {
2220:     PetscInt i;

2222:     DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);
2223:     for (i=1; i<nlevels; i++) {
2224:       DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);
2225:     }
2226:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2227:   return(0);
2228: }

2232: /*@C
2233:     DMCoarsenHierarchy - Coarsens a DM object, all levels at once

2235:     Collective on DM

2237:     Input Parameters:
2238: +   dm - the DM object
2239: -   nlevels - the number of levels of coarsening

2241:     Output Parameter:
2242: .   dmc - the coarsened DM hierarchy

2244:     Level: developer

2246: .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2248: @*/
2249: PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2250: {

2255:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2256:   if (nlevels == 0) return(0);
2258:   if (dm->ops->coarsenhierarchy) {
2259:     (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);
2260:   } else if (dm->ops->coarsen) {
2261:     PetscInt i;

2263:     DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);
2264:     for (i=1; i<nlevels; i++) {
2265:       DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);
2266:     }
2267:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2268:   return(0);
2269: }
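
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It builds a complete coarsened hierarchy in one call and cleans it up
   afterwards; the helper name is hypothetical.
*/
static PetscErrorCode ExampleCoarsenHierarchy_Sketch(DM dm,PetscInt nlevels)
{
  DM       *dmc;
  PetscInt i;

  PetscMalloc1(nlevels,&dmc);
  DMCoarsenHierarchy(dm,nlevels,dmc);
  /* dmc[0] is the DM coarsened once, dmc[nlevels-1] the coarsest */
  for (i = 0; i < nlevels; ++i) {DMDestroy(&dmc[i]);}
  PetscFree(dmc);
  return(0);
}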

2273: /*@
2274:    DMCreateAggregates - Gets the aggregates that map between
2275:    grids associated with two DMs.

2277:    Collective on DM

2279:    Input Parameters:
2280: +  dmc - the coarse grid DM
2281: -  dmf - the fine grid DM

2283:    Output Parameter:
2284: .  rest - the restriction matrix (transpose of the projection matrix)

2286:    Level: intermediate

2288: .keywords: interpolation, restriction, multigrid

2290: .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2291: @*/
2292: PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2293: {

2299:   (*dmc->ops->getaggregates)(dmc, dmf, rest);
2300:   return(0);
2301: }

2305: /*@C
2306:     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

2308:     Not Collective

2310:     Input Parameters:
2311: +   dm - the DM object
2312: -   destroy - the destroy function

2314:     Level: intermediate

2316: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2318: @*/
2319: PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2320: {
2323:   dm->ctxdestroy = destroy;
2324:   return(0);
2325: }

2329: /*@
2330:     DMSetApplicationContext - Set a user context into a DM object

2332:     Not Collective

2334:     Input Parameters:
2335: +   dm - the DM object
2336: -   ctx - the user context

2338:     Level: intermediate

2340: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2342: @*/
2343: PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2344: {
2347:   dm->ctx = ctx;
2348:   return(0);
2349: }

2353: /*@
2354:     DMGetApplicationContext - Gets a user context from a DM object

2356:     Not Collective

2358:     Input Parameter:
2359: .   dm - the DM object

2361:     Output Parameter:
2362: .   ctx - the user context

2364:     Level: intermediate

2366: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2368: @*/
2369: PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2370: {
2373:   *(void**)ctx = dm->ctx;
2374:   return(0);
2375: }
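
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It attaches a user-defined context to a DM and retrieves it again;
   the MyAppCtx_Sketch type, its field, and the helper name are hypothetical.
*/
typedef struct { PetscReal viscosity; } MyAppCtx_Sketch;

static PetscErrorCode ExampleUseAppCtx_Sketch(DM dm,MyAppCtx_Sketch *user)
{
  MyAppCtx_Sketch *check;

  DMSetApplicationContext(dm,user);
  DMGetApplicationContext(dm,&check);   /* check now holds the same pointer that was set */
  return(0);
}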

2379: /*@C
2380:     DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.

2382:     Logically Collective on DM

2384:     Input Parameters:
2385: +   dm - the DM object
2386: -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)

2388:     Level: intermediate

2390: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2391:          DMSetJacobian()

2393: @*/
2394: PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2395: {
2397:   dm->ops->computevariablebounds = f;
2398:   return(0);
2399: }
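
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It registers a simple bounds callback that enforces 0 <= x <= 1 for
   SNESVI; both helper names are hypothetical.
*/
static PetscErrorCode MyBounds_Sketch(DM dm,Vec xl,Vec xu)
{
  VecSet(xl,0.0);
  VecSet(xu,1.0);
  return(0);
}

static PetscErrorCode ExampleSetBounds_Sketch(DM dm)
{
  DMSetVariableBounds(dm,MyBounds_Sketch);
  return(0);
}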

2403: /*@
2404:     DMHasVariableBounds - does the DM object have a variable bounds function?

2406:     Not Collective

2408:     Input Parameter:
2409: .   dm - the DM object

2411:     Output Parameter:
2412: .   flg - PETSC_TRUE if the variable bounds function exists

2414:     Level: developer

2416: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2418: @*/
2419: PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2420: {
2422:   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2423:   return(0);
2424: }

2428: /*@C
2429:     DMComputeVariableBounds - compute variable bounds used by SNESVI.

2431:     Logically Collective on DM

2433:     Input Parameters:
2434: +   dm - the DM object
2435: -   x  - current solution at which the bounds are computed

2437:     Output parameters:
2438: +   xl - lower bound
2439: -   xu - upper bound

2441:     Level: intermediate

2443: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2445: @*/
2446: PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2447: {

2453:   if (dm->ops->computevariablebounds) {
2454:     (*dm->ops->computevariablebounds)(dm, xl,xu);
2455:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2456:   return(0);
2457: }

2461: /*@
2462:     DMHasColoring - does the DM object have a method of providing a coloring?

2464:     Not Collective

2466:     Input Parameter:
2467: .   dm - the DM object

2469:     Output Parameter:
2470: .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().

2472:     Level: developer

2474: .seealso DMHasFunction(), DMCreateColoring()

2476: @*/
2477: PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2478: {
2480:   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2481:   return(0);
2482: }

2486: /*@C
2487:     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

2489:     Collective on DM

2491:     Input Parameters:
2492: +   dm - the DM object
2493: -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.

2495:     Level: developer

2497: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2499: @*/
2500: PetscErrorCode  DMSetVec(DM dm,Vec x)
2501: {

2505:   if (x) {
2506:     if (!dm->x) {
2507:       DMCreateGlobalVector(dm,&dm->x);
2508:     }
2509:     VecCopy(x,dm->x);
2510:   } else if (dm->x) {
2511:     VecDestroy(&dm->x);
2512:   }
2513:   return(0);
2514: }

2516: PetscFunctionList DMList              = NULL;
2517: PetscBool         DMRegisterAllCalled = PETSC_FALSE;

2521: /*@C
2522:   DMSetType - Builds a DM, for a particular DM implementation.

2524:   Collective on DM

2526:   Input Parameters:
2527: + dm     - The DM object
2528: - method - The name of the DM type

2530:   Options Database Key:
2531: . -dm_type <type> - Sets the DM type; use -help for a list of available types

2533:   Notes:
2534:   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

2536:   Level: intermediate

2538: .keywords: DM, set, type
2539: .seealso: DMGetType(), DMCreate()
2540: @*/
2541: PetscErrorCode  DMSetType(DM dm, DMType method)
2542: {
2543:   PetscErrorCode (*r)(DM);
2544:   PetscBool      match;

2549:   PetscObjectTypeCompare((PetscObject) dm, method, &match);
2550:   if (match) return(0);

2552:   if (!DMRegisterAllCalled) {DMRegisterAll();}
2553:   PetscFunctionListFind(DMList,method,&r);
2554:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

2556:   if (dm->ops->destroy) {
2557:     (*dm->ops->destroy)(dm);
2558:     dm->ops->destroy = NULL;
2559:   }
2560:   (*r)(dm);
2561:   PetscObjectChangeTypeName((PetscObject)dm,method);
2562:   return(0);
2563: }
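
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It creates a DM and selects an implementation programmatically; DMDA
   is used here only as an example of a registered type, and the helper name is
   hypothetical.
*/
static PetscErrorCode ExampleSetType_Sketch(MPI_Comm comm,DM *dm)
{
  DMCreate(comm,dm);
  DMSetType(*dm,DMDA);
  DMSetFromOptions(*dm);     /* pick up further options for the chosen implementation */
  return(0);
}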

2567: /*@C
2568:   DMGetType - Gets the DM type name (as a string) from the DM.

2570:   Not Collective

2572:   Input Parameter:
2573: . dm  - The DM

2575:   Output Parameter:
2576: . type - The DM type name

2578:   Level: intermediate

2580: .keywords: DM, get, type, name
2581: .seealso: DMSetType(), DMCreate()
2582: @*/
2583: PetscErrorCode  DMGetType(DM dm, DMType *type)
2584: {

2590:   if (!DMRegisterAllCalled) {
2591:     DMRegisterAll();
2592:   }
2593:   *type = ((PetscObject)dm)->type_name;
2594:   return(0);
2595: }

2599: /*@C
2600:   DMConvert - Converts a DM to another DM, either of the same or different type.

2602:   Collective on DM

2604:   Input Parameters:
2605: + dm - the DM
2606: - newtype - new DM type (use "same" for the same type)

2608:   Output Parameter:
2609: . M - pointer to new DM

2611:   Notes:
2612:   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2613:   the MPI communicator of the generated DM is always the same as the communicator
2614:   of the input DM.

2616:   Level: intermediate

2618: .seealso: DMCreate()
2619: @*/
2620: PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2621: {
2622:   DM             B;
2623:   char           convname[256];
2624:   PetscBool      sametype, issame;

2631:   PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);
2632:   PetscStrcmp(newtype, "same", &issame);
2633:   {
2634:     PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

2636:     /*
2637:        Order of precedence:
2638:        1) See if a specialized converter is known to the current DM.
2639:        2) See if a specialized converter is known to the desired DM class.
2640:        3) See if a good general converter is registered for the desired class
2641:        4) See if a good general converter is known for the current DM.
2642:        5) Use a really basic converter.
2643:     */

2645:     /* 1) See if a specialized converter is known to the current DM and the desired class */
2646:     PetscStrcpy(convname,"DMConvert_");
2647:     PetscStrcat(convname,((PetscObject) dm)->type_name);
2648:     PetscStrcat(convname,"_");
2649:     PetscStrcat(convname,newtype);
2650:     PetscStrcat(convname,"_C");
2651:     PetscObjectQueryFunction((PetscObject)dm,convname,&conv);
2652:     if (conv) goto foundconv;

2654:     /* 2)  See if a specialized converter is known to the desired DM class. */
2655:     DMCreate(PetscObjectComm((PetscObject)dm), &B);
2656:     DMSetType(B, newtype);
2657:     PetscStrcpy(convname,"DMConvert_");
2658:     PetscStrcat(convname,((PetscObject) dm)->type_name);
2659:     PetscStrcat(convname,"_");
2660:     PetscStrcat(convname,newtype);
2661:     PetscStrcat(convname,"_C");
2662:     PetscObjectQueryFunction((PetscObject)B,convname,&conv);
2663:     if (conv) {
2664:       DMDestroy(&B);
2665:       goto foundconv;
2666:     }

2668: #if 0
2669:     /* 3) See if a good general converter is registered for the desired class */
2670:     conv = B->ops->convertfrom;
2671:     DMDestroy(&B);
2672:     if (conv) goto foundconv;

2674:     /* 4) See if a good general converter is known for the current DM */
2675:     if (dm->ops->convert) {
2676:       conv = dm->ops->convert;
2677:     }
2678:     if (conv) goto foundconv;
2679: #endif

2681:     /* 5) Use a really basic converter. */
2682:     SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

2684: foundconv:
2685:     PetscLogEventBegin(DM_Convert,dm,0,0,0);
2686:     (*conv)(dm,newtype,M);
2687:     PetscLogEventEnd(DM_Convert,dm,0,0,0);
2688:   }
2689:   PetscObjectStateIncrease((PetscObject) *M);
2690:   return(0);
2691: }
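
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It converts a DM to another type; the call succeeds only if a
   converter between the two types has been registered. The helper name is
   hypothetical and newtype is a placeholder.
*/
static PetscErrorCode ExampleConvert_Sketch(DM dm,DMType newtype)
{
  DM dmNew;

  DMConvert(dm,newtype,&dmNew);
  /* ... use dmNew, which lives on the same communicator as dm ... */
  DMDestroy(&dmNew);
  return(0);
}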

2693: /*--------------------------------------------------------------------------------------------------------------------*/

2697: /*@C
2698:   DMRegister -  Adds a new DM component implementation

2700:   Not Collective

2702:   Input Parameters:
2703: + name        - The name of a new user-defined creation routine
2704: - create_func - The creation routine itself

2706:   Notes:
2707:   DMRegister() may be called multiple times to add several user-defined DMs


2710:   Sample usage:
2711: .vb
2712:     DMRegister("my_da", MyDMCreate);
2713: .ve

2715:   Then, your DM type can be chosen with the procedural interface via
2716: .vb
2717:     DMCreate(MPI_Comm, DM *);
2718:     DMSetType(DM,"my_da");
2719: .ve
2720:    or at runtime via the option
2721: .vb
2722:     -dm_type my_da
2723: .ve

2725:   Level: advanced

2727: .keywords: DM, register
2728: .seealso: DMRegisterAll(), DMRegisterDestroy()

2730: @*/
2731: PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2732: {

2736:   PetscFunctionListAdd(&DMList,sname,function);
2737:   return(0);
2738: }

2742: /*@C
2743:   DMLoad - Loads a DM that has been stored in binary with DMView().

2745:   Collective on PetscViewer

2747:   Input Parameters:
2748: + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2749:            some related function before a call to DMLoad().
2750: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2751:            HDF5 file viewer, obtained from PetscViewerHDF5Open()

2753:    Level: intermediate

2755:   Notes:
2756:    The type is determined by the data in the file; any type set into the DM before this call is ignored.

2758:   Notes for advanced users:
2759:   Most users should not need to know the details of the binary storage
2760:   format, since DMLoad() and DMView() completely hide these details.
2761:   But for anyone who's interested, the standard binary DM storage
2762:   format is
2763: .vb
2764:      has not yet been determined
2765: .ve

2767: .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2768: @*/
2769: PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2770: {
2771:   PetscBool      isbinary, ishdf5;

2777:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
2778:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
2779:   if (isbinary) {
2780:     PetscInt classid;
2781:     char     type[256];

2783:     PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
2784:     if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2785:     PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
2786:     DMSetType(newdm, type);
2787:     if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2788:   } else if (ishdf5) {
2789:     if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2790:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
2791:   return(0);
2792: }
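
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It loads a DM that was previously written with DMView() into a binary
   viewer; the file name is a placeholder and the helper name is hypothetical.
*/
static PetscErrorCode ExampleLoad_Sketch(MPI_Comm comm,DM *dm)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(comm,"dm.bin",FILE_MODE_READ,&viewer);
  DMCreate(comm,dm);
  DMLoad(*dm,viewer);
  PetscViewerDestroy(&viewer);
  return(0);
}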

2794: /******************************** FEM Support **********************************/

2798: PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2799: {
2800:   PetscInt       f;

2804:   PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2805:   for (f = 0; f < len; ++f) {
2806:     PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));
2807:   }
2808:   return(0);
2809: }

2813: PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2814: {
2815:   PetscInt       f, g;

2819:   PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2820:   for (f = 0; f < rows; ++f) {
2821:     PetscPrintf(PETSC_COMM_SELF, "  |");
2822:     for (g = 0; g < cols; ++g) {
2823:       PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));
2824:     }
2825:     PetscPrintf(PETSC_COMM_SELF, " |\n");
2826:   }
2827:   return(0);
2828: }

2832: PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2833: {
2834:   PetscMPIInt    rank, numProcs;
2835:   PetscInt       p;

2839:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2840:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
2841:   PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);
2842:   for (p = 0; p < numProcs; ++p) {
2843:     if (p == rank) {
2844:       Vec x;

2846:       VecDuplicate(X, &x);
2847:       VecCopy(X, x);
2848:       VecChop(x, tol);
2849:       VecView(x, PETSC_VIEWER_STDOUT_SELF);
2850:       VecDestroy(&x);
2851:       PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
2852:     }
2853:     PetscBarrier((PetscObject) dm);
2854:   }
2855:   return(0);
2856: }

2860: /*@
2861:   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

2863:   Input Parameter:
2864: . dm - The DM

2866:   Output Parameter:
2867: . section - The PetscSection

2869:   Level: intermediate

2871:   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

2873: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2874: @*/
2875: PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2876: {

2882:   if (!dm->defaultSection && dm->ops->createdefaultsection) {(*dm->ops->createdefaultsection)(dm);}
2883:   *section = dm->defaultSection;
2884:   return(0);
2885: }

2889: /*@
2890:   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

2892:   Input Parameters:
2893: + dm - The DM
2894: - section - The PetscSection

2896:   Level: intermediate

2898:   Note: Any existing Section will be destroyed

2900: .seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2901: @*/
2902: PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2903: {
2904:   PetscInt       numFields;
2905:   PetscInt       f;

2911:   PetscObjectReference((PetscObject)section);
2912:   PetscSectionDestroy(&dm->defaultSection);
2913:   dm->defaultSection = section;
2914:   PetscSectionGetNumFields(dm->defaultSection, &numFields);
2915:   if (numFields) {
2916:     DMSetNumFields(dm, numFields);
2917:     for (f = 0; f < numFields; ++f) {
2918:       const char *name;

2920:       PetscSectionGetFieldName(dm->defaultSection, f, &name);
2921:       PetscObjectSetName((PetscObject) dm->fields[f], name);
2922:     }
2923:   }
2924:   /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2925:   PetscSectionDestroy(&dm->defaultGlobalSection);
2926:   return(0);
2927: }
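
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It builds a minimal PetscSection (one dof on every point of the chart)
   and installs it as the DM's local data layout; the helper name and the chart
   bounds are placeholders.
*/
static PetscErrorCode ExampleSetSection_Sketch(DM dm,PetscInt pStart,PetscInt pEnd)
{
  PetscSection s;
  PetscInt     p;

  PetscSectionCreate(PetscObjectComm((PetscObject)dm),&s);
  PetscSectionSetChart(s,pStart,pEnd);
  for (p = pStart; p < pEnd; ++p) {PetscSectionSetDof(s,p,1);}
  PetscSectionSetUp(s);
  DMSetDefaultSection(dm,s);       /* the DM keeps its own reference */
  PetscSectionDestroy(&s);
  return(0);
}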

2931: /*@
2932:   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

2934:   Collective on DM

2936:   Input Parameter:
2937: . dm - The DM

2939:   Output Parameter:
2940: . section - The PetscSection

2942:   Level: intermediate

2944:   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

2946: .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2947: @*/
2948: PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2949: {

2955:   if (!dm->defaultGlobalSection) {
2956:     PetscSection s;

2958:     DMGetDefaultSection(dm, &s);
2959:     if (!s)  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
2960:     if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
2961:     PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);
2962:     PetscLayoutDestroy(&dm->map);
2963:     PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);
2964:   }
2965:   *section = dm->defaultGlobalSection;
2966:   return(0);
2967: }

2971: /*@
2972:   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

2974:   Input Parameters:
2975: + dm - The DM
2976: - section - The PetscSection, or NULL

2978:   Level: intermediate

2980:   Note: Any existing Section will be destroyed

2982: .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2983: @*/
2984: PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2985: {

2991:   PetscObjectReference((PetscObject)section);
2992:   PetscSectionDestroy(&dm->defaultGlobalSection);
2993:   dm->defaultGlobalSection = section;
2994:   return(0);
2995: }

2999: /*@
3000:   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
3001:   it is created from the default PetscSection layouts in the DM.

3003:   Input Parameter:
3004: . dm - The DM

3006:   Output Parameter:
3007: . sf - The PetscSF

3009:   Level: intermediate

3011:   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

3013: .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
3014: @*/
3015: PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
3016: {
3017:   PetscInt       nroots;

3023:   PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);
3024:   if (nroots < 0) {
3025:     PetscSection section, gSection;

3027:     DMGetDefaultSection(dm, &section);
3028:     if (section) {
3029:       DMGetDefaultGlobalSection(dm, &gSection);
3030:       DMCreateDefaultSF(dm, section, gSection);
3031:     } else {
3032:       *sf = NULL;
3033:       return(0);
3034:     }
3035:   }
3036:   *sf = dm->defaultSF;
3037:   return(0);
3038: }

3042: /*@
3043:   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

3045:   Input Parameters:
3046: + dm - The DM
3047: - sf - The PetscSF

3049:   Level: intermediate

3051:   Note: Any previous SF is destroyed

3053: .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3054: @*/
3055: PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3056: {

3062:   PetscSFDestroy(&dm->defaultSF);
3063:   dm->defaultSF = sf;
3064:   return(0);
3065: }

3069: /*@C
3070:   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3071:   describing the data layout.

3073:   Input Parameters:
3074: + dm - The DM
3075: . localSection - PetscSection describing the local data layout
3076: - globalSection - PetscSection describing the global data layout

3078:   Level: intermediate

3080: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3081: @*/
3082: PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3083: {
3084:   MPI_Comm       comm;
3085:   PetscLayout    layout;
3086:   const PetscInt *ranges;
3087:   PetscInt       *local;
3088:   PetscSFNode    *remote;
3089:   PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
3090:   PetscMPIInt    size, rank;

3094:   PetscObjectGetComm((PetscObject)dm,&comm);
3096:   MPI_Comm_size(comm, &size);
3097:   MPI_Comm_rank(comm, &rank);
3098:   PetscSectionGetChart(globalSection, &pStart, &pEnd);
3099:   PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
3100:   PetscLayoutCreate(comm, &layout);
3101:   PetscLayoutSetBlockSize(layout, 1);
3102:   PetscLayoutSetLocalSize(layout, nroots);
3103:   PetscLayoutSetUp(layout);
3104:   PetscLayoutGetRanges(layout, &ranges);
3105:   for (p = pStart; p < pEnd; ++p) {
3106:     PetscInt gdof, gcdof;

3108:     PetscSectionGetDof(globalSection, p, &gdof);
3109:     PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3110:     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3111:   }
3112:   PetscMalloc1(nleaves, &local);
3113:   PetscMalloc1(nleaves, &remote);
3114:   for (p = pStart, l = 0; p < pEnd; ++p) {
3115:     const PetscInt *cind;
3116:     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

3118:     PetscSectionGetDof(localSection, p, &dof);
3119:     PetscSectionGetOffset(localSection, p, &off);
3120:     PetscSectionGetConstraintDof(localSection, p, &cdof);
3121:     PetscSectionGetConstraintIndices(localSection, p, &cind);
3122:     PetscSectionGetDof(globalSection, p, &gdof);
3123:     PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3124:     PetscSectionGetOffset(globalSection, p, &goff);
3125:     if (!gdof) continue; /* Censored point */
3126:     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3127:     if (gsize != dof-cdof) {
3128:       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
3129:       cdof = 0; /* Ignore constraints */
3130:     }
3131:     for (d = 0, c = 0; d < dof; ++d) {
3132:       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3133:       local[l+d-c] = off+d;
3134:     }
3135:     if (gdof < 0) {
3136:       for (d = 0; d < gsize; ++d, ++l) {
3137:         PetscInt offset = -(goff+1) + d, r;

3139:         PetscFindInt(offset,size+1,ranges,&r);
3140:         if (r < 0) r = -(r+2);
3141:         if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
3142:         remote[l].rank  = r;
3143:         remote[l].index = offset - ranges[r];
3144:       }
3145:     } else {
3146:       for (d = 0; d < gsize; ++d, ++l) {
3147:         remote[l].rank  = rank;
3148:         remote[l].index = goff+d - ranges[rank];
3149:       }
3150:     }
3151:   }
3152:   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3153:   PetscLayoutDestroy(&layout);
3154:   PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
3155:   return(0);
3156: }

3160: /*@
3161:   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

3163:   Input Parameter:
3164: . dm - The DM

3166:   Output Parameter:
3167: . sf - The PetscSF

3169:   Level: intermediate

3171:   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

3173: .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3174: @*/
3175: PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3176: {
3180:   *sf = dm->sf;
3181:   return(0);
3182: }

3186: /*@
3187:   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.

3189:   Input Parameters:
3190: + dm - The DM
3191: - sf - The PetscSF

3193:   Level: intermediate

3195: .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3196: @*/
3197: PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3198: {

3204:   PetscSFDestroy(&dm->sf);
3205:   PetscObjectReference((PetscObject) sf);
3206:   dm->sf = sf;
3207:   return(0);
3208: }

3212: PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3213: {
3217:   *numFields = dm->numFields;
3218:   return(0);
3219: }

3223: PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3224: {
3225:   PetscInt       f;

3230:   if (dm->numFields == numFields) return(0);
3231:   for (f = 0; f < dm->numFields; ++f) {
3232:     PetscObjectDestroy((PetscObject *) &dm->fields[f]);
3233:   }
3234:   PetscFree(dm->fields);
3235:   dm->numFields = numFields;
3236:   PetscMalloc1(dm->numFields, &dm->fields);
3237:   for (f = 0; f < dm->numFields; ++f) {
3238:     PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);
3239:   }
3240:   return(0);
3241: }

3245: PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3246: {
3250:   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3251:   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3252:   *field = (PetscObject) dm->fields[f];
3253:   return(0);
3254: }

3258: PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field)
3259: {

3264:   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3265:   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3266:   if (((PetscObject) dm->fields[f]) == field) return(0);
3267:   PetscObjectDestroy((PetscObject *) &dm->fields[f]);
3268:   dm->fields[f] = (PetscFE) field;
3269:   PetscObjectReference((PetscObject) dm->fields[f]);
3270:   return(0);
3271: }

3275: PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
3276: {
3277:   DM dm_coord,dmc_coord;
3279:   Vec coords,ccoords;
3280:   VecScatter scat;
3282:   DMGetCoordinateDM(dm,&dm_coord);
3283:   DMGetCoordinateDM(dmc,&dmc_coord);
3284:   DMGetCoordinates(dm,&coords);
3285:   DMGetCoordinates(dmc,&ccoords);
3286:   if (coords && !ccoords) {
3287:     DMCreateGlobalVector(dmc_coord,&ccoords);
3288:     DMCreateInjection(dmc_coord,dm_coord,&scat);
3289:     VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3290:     VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3291:     DMSetCoordinates(dmc,ccoords);
3292:     VecScatterDestroy(&scat);
3293:     VecDestroy(&ccoords);
3294:   }
3295:   return(0);
3296: }

3300: static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
3301: {
3302:   DM dm_coord,subdm_coord;
3304:   Vec coords,ccoords,clcoords;
3305:   VecScatter *scat_i,*scat_g;
3307:   DMGetCoordinateDM(dm,&dm_coord);
3308:   DMGetCoordinateDM(subdm,&subdm_coord);
3309:   DMGetCoordinates(dm,&coords);
3310:   DMGetCoordinates(subdm,&ccoords);
3311:   if (coords && !ccoords) {
3312:     DMCreateGlobalVector(subdm_coord,&ccoords);
3313:     DMCreateLocalVector(subdm_coord,&clcoords);
3314:     DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);
3315:     VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3316:     VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3317:     VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3318:     VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3319:     DMSetCoordinates(subdm,ccoords);
3320:     DMSetCoordinatesLocal(subdm,clcoords);
3321:     VecScatterDestroy(&scat_i[0]);
3322:     VecScatterDestroy(&scat_g[0]);
3323:     VecDestroy(&ccoords);
3324:     VecDestroy(&clcoords);
3325:     PetscFree(scat_i);
3326:     PetscFree(scat_g);
3327:   }
3328:   return(0);
3329: }

3333: /*@
3334:   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

3336:   Collective on DM

3338:   Input Parameters:
3339: + dm - the DM
3340: - c - coordinate vector

3342:   Note:
3343:   The coordinates do include those for ghost points, which are in the local vector

3345:   Level: intermediate

3347: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3348: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3349: @*/
3350: PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3351: {

3357:   PetscObjectReference((PetscObject) c);
3358:   VecDestroy(&dm->coordinates);
3359:   dm->coordinates = c;
3360:   VecDestroy(&dm->coordinatesLocal);
3361:   DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);
3362:   DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);
3363:   return(0);
3364: }

3368: /*@
3369:   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

3371:   Collective on DM

3373:    Input Parameters:
3374: +  dm - the DM
3375: -  c - coordinate vector

3377:   Note:
3378:   The coordinates of ghost points can be set using DMSetCoordinates()
3379:   followed by DMGetCoordinatesLocal(). This is intended to enable the
3380:   setting of ghost coordinates outside of the domain.

3382:   Level: intermediate

3384: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3385: .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3386: @*/
3387: PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3388: {

3394:   PetscObjectReference((PetscObject) c);
3395:   VecDestroy(&dm->coordinatesLocal);

3397:   dm->coordinatesLocal = c;

3399:   VecDestroy(&dm->coordinates);
3400:   return(0);
3401: }

3405: /*@
3406:   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

3408:   Not Collective

3410:   Input Parameter:
3411: . dm - the DM

3413:   Output Parameter:
3414: . c - global coordinate vector

3416:   Note:
3417:   This is a borrowed reference, so the user should NOT destroy this vector

3419:   Each process has only the local coordinates (does NOT have the ghost coordinates).

3421:   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3422:   and (x_0,y_0,z_0,x_1,y_1,z_1...)

3424:   Level: intermediate

3426: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3427: .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3428: @*/
3429: PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3430: {

3436:   if (!dm->coordinates && dm->coordinatesLocal) {
3437:     DM cdm = NULL;

3439:     DMGetCoordinateDM(dm, &cdm);
3440:     DMCreateGlobalVector(cdm, &dm->coordinates);
3441:     PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");
3442:     DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3443:     DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3444:   }
3445:   *c = dm->coordinates;
3446:   return(0);
3447: }
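
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It inspects the global coordinate vector of a DM; the vector is a
   borrowed reference and must not be destroyed. The helper name is hypothetical.
*/
static PetscErrorCode ExampleViewCoordinates_Sketch(DM dm)
{
  Vec coords;

  DMGetCoordinates(dm,&coords);
  if (coords) {VecView(coords,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)dm)));}
  return(0);
}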

3451: /*@
3452:   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

3454:   Collective on DM

3456:   Input Parameter:
3457: . dm - the DM

3459:   Output Parameter:
3460: . c - coordinate vector

3462:   Note:
3463:   This is a borrowed reference, so the user should NOT destroy this vector

3465:   Each process has the local and ghost coordinates

3467:   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3468:   and (x_0,y_0,z_0,x_1,y_1,z_1...)

3470:   Level: intermediate

3472: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3473: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3474: @*/
3475: PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3476: {

3482:   if (!dm->coordinatesLocal && dm->coordinates) {
3483:     DM cdm = NULL;

3485:     DMGetCoordinateDM(dm, &cdm);
3486:     DMCreateLocalVector(cdm, &dm->coordinatesLocal);
3487:     PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");
3488:     DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3489:     DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3490:   }
3491:   *c = dm->coordinatesLocal;
3492:   return(0);
3493: }

3497: /*@
3498:   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates

3500:   Collective on DM

3502:   Input Parameter:
3503: . dm - the DM

3505:   Output Parameter:
3506: . cdm - coordinate DM

3508:   Level: intermediate

3510: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3511: .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3512: @*/
3513: PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3514: {

3520:   if (!dm->coordinateDM) {
3521:     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3522:     (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);
3523:   }
3524:   *cdm = dm->coordinateDM;
3525:   return(0);
3526: }

3530: /*@
3531:   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates

3533:   Logically Collective on DM

3535:   Input Parameters:
3536: + dm - the DM
3537: - cdm - coordinate DM

3539:   Level: intermediate

3541: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3542: .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3543: @*/
3544: PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3545: {

3551:   DMDestroy(&dm->coordinateDM);
3552:   dm->coordinateDM = cdm;
3553:   PetscObjectReference((PetscObject) dm->coordinateDM);
3554:   return(0);
3555: }

3559: /*@
3560:   DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.

3562:   Not Collective

3564:   Input Parameter:
3565: . dm - The DM object

3567:   Output Parameter:
3568: . section - The PetscSection object

3570:   Level: intermediate

3572: .keywords: mesh, coordinates
3573: .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3574: @*/
3575: PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3576: {
3577:   DM             cdm;

3583:   DMGetCoordinateDM(dm, &cdm);
3584:   DMGetDefaultSection(cdm, section);
3585:   return(0);
3586: }

3590: /*@
3591:   DMSetCoordinateSection - Set the layout of coordinate values over the mesh.

3593:   Not Collective

3595:   Input Parameters:
3596: + dm      - The DM object
3597: - section - The PetscSection object

3599:   Level: intermediate

3601: .keywords: mesh, coordinates
3602: .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3603: @*/
3604: PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3605: {
3606:   DM             cdm;

3612:   DMGetCoordinateDM(dm, &cdm);
3613:   DMSetDefaultSection(cdm, section);
3614:   return(0);
3615: }

3619: PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **L)
3620: {
3623:   if (L)       *L       = dm->L;
3624:   if (maxCell) *maxCell = dm->maxCell;
3625:   return(0);
3626: }

3630: PetscErrorCode DMSetPeriodicity(DM dm, const PetscReal maxCell[], const PetscReal L[])
3631: {
3632:   Vec            coordinates;
3633:   PetscInt       dim, d;

3638:   DMGetCoordinatesLocal(dm, &coordinates);
3639:   VecGetBlockSize(coordinates, &dim);
3640:   PetscMalloc1(dim,&dm->L);
3641:   PetscMalloc1(dim,&dm->maxCell);
3642:   for (d = 0; d < dim; ++d) {dm->L[d] = L[d]; dm->maxCell[d] = maxCell[d];}
3643:   return(0);
3644: }

3648: /*@
3649:   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells

3651:   Not Collective

3653:   Input Parameters:
3654: + dm - The DM
3655: - v - The Vec of points

3657:   Output Parameter:
3658: . cells - The local cell numbers for cells which contain the points

3660:   Level: developer

3662: .keywords: point location, mesh
3663: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3664: @*/
3665: PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3666: {

3673:   if (dm->ops->locatepoints) {
3674:     (*dm->ops->locatepoints)(dm,v,cells);
3675:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3676:   return(0);
3677: }

3681: /*@
3682:   DMGetOutputDM - Retrieve the DM associated with the layout for output

3684:   Input Parameter:
3685: . dm - The original DM

3687:   Output Parameter:
3688: . odm - The DM which provides the layout for output

3690:   Level: intermediate

3692: .seealso: VecView(), DMGetDefaultGlobalSection()
3693: @*/
3694: PetscErrorCode DMGetOutputDM(DM dm, DM *odm)
3695: {
3696:   PetscSection   section;
3697:   PetscBool      hasConstraints;

3703:   DMGetDefaultSection(dm, &section);
3704:   PetscSectionHasConstraints(section, &hasConstraints);
3705:   if (!hasConstraints) {
3706:     *odm = dm;
3707:     return(0);
3708:   }
3709:   if (!dm->dmBC) {
3710:     PetscSection newSection, gsection;
3711:     PetscSF      sf;

3713:     DMClone(dm, &dm->dmBC);
3714:     PetscSectionClone(section, &newSection);
3715:     DMSetDefaultSection(dm->dmBC, newSection);
3716:     PetscSectionDestroy(&newSection);
3717:     DMGetPointSF(dm->dmBC, &sf);
3718:     PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);
3719:     DMSetDefaultGlobalSection(dm->dmBC, gsection);
3720:     PetscSectionDestroy(&gsection);
3721:   }
3722:   *odm = dm->dmBC;
3723:   return(0);
3724: }

3728: /*@
3729:   DMGetOutputSequenceNumber - Retrieve the sequence number for output

3731:   Input Parameter:
3732: . dm - The original DM

3734:   Output Parameter:
3735: . num - The output sequence number

3737:   Level: intermediate

3739:   Note: This is intended for output that should appear in sequence, for instance
3740:   a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.

3742: .seealso: VecView()
3743: @*/
3744: PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num)
3745: {
3749:   *num = dm->outputSequenceNum;
3750:   return(0);
3751: }

3755: /*@
3756:   DMSetOutputSequenceNumber - Set the sequence number for output

3758:   Input Parameters:
3759: + dm - The original DM
3760: - num - The output sequence number

3762:   Level: intermediate

3764:   Note: This is intended for output that should appear in sequence, for instance
3765:   a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.

3767: .seealso: VecView()
3768: @*/
3769: PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num)
3770: {
3773:   dm->outputSequenceNum = num;
3774:   return(0);
3775: }
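
/*
   Illustrative usage sketch added for exposition; it is not part of the original
   source. It advances the output sequence number once per saved state so that
   sequence-aware viewers can group the outputs; the solution vector, viewer,
   loop bounds, and helper name are placeholders.
*/
static PetscErrorCode ExampleSequencedOutput_Sketch(DM dm,Vec u,PetscViewer viewer,PetscInt nsteps)
{
  PetscInt step;

  for (step = 0; step < nsteps; ++step) {
    /* ... advance the solution u ... */
    DMSetOutputSequenceNumber(dm,step);
    VecView(u,viewer);
  }
  return(0);
}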