Actual source code: dmredundant.c

petsc-3.3-p5 2012-12-01
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"          I*/
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/
#include <petscmat.h>           /*I      "petscmat.h"         I*/

typedef struct  {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
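/*
   Illustrative example (not part of the original source) of how these fields are
   set: on a 3-process communicator with rank=1 and N=4, the owning process holds
   all of the degrees of freedom and the others hold none:

       process:   0   1   2
       red->n :   0   4   0     (locally owned size of the global vector)
       red->N :   4   4   4     (total size, the same on every process)

   Local vectors always have length N on every process; only the global vector is
   partitioned according to red->n.
*/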

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,const MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  MatCreate(((PetscObject)dm)->comm,J);
  MatSetSizes(*J,red->n,red->n,red->N,red->N);
  MatSetType(*J,mtype);
  MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);
  MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);
  MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);
  MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);

  DMGetLocalToGlobalMapping(dm,&ltog);
  DMGetLocalToGlobalMappingBlock(dm,&ltogb);
  MatSetLocalToGlobalMapping(*J,ltog,ltog);
  MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);

  PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  MatGetOwnershipRange(*J,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);
  }
  PetscFree2(cols,vals);
  MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
  return(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{

  PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);
  PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);
  return(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  *gvec = 0;
  VecCreate(((PetscObject)dm)->comm,gvec);
  VecSetSizes(*gvec,red->n,red->N);
  VecSetType(*gvec,dm->vectype);
  DMGetLocalToGlobalMapping(dm,&ltog);
  DMGetLocalToGlobalMappingBlock(dm,&ltogb);
  VecSetLocalToGlobalMapping(*gvec,ltog);
  VecSetLocalToGlobalMappingBlock(*gvec,ltog);
  PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);
  return(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;

  *lvec = 0;
  VecCreate(PETSC_COMM_SELF,lvec);
  VecSetSizes(*lvec,red->N,red->N);
  VecSetType(*lvec,dm->vectype);
  PetscObjectCompose((PetscObject)*lvec,"DM",(PetscObject)dm);
  return(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  MPI_Comm_rank(((PetscObject)dm)->comm,&rank);
  VecGetArrayRead(l,&lv);
  VecGetArray(g,&gv);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void *source;
    PetscScalar *buffer;
    PetscInt i;
    if (rank == red->rank) {
#if defined (PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      PetscMalloc(red->N*sizeof(PetscScalar),&buffer);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      source = (void*)lv;
    }
    MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES)?MPI_SUM:MPI_MAX,red->rank,((PetscObject)dm)->comm);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {PetscFree(buffer);}
#endif
  } break;
  case INSERT_VALUES:
    PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(l,&lv);
  VecRestoreArray(g,&gv);
  return(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  return(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  VecGetArrayRead(g,&gv);
  VecGetArray(l,&lv);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));}
    MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(g,&gv);
  VecRestoreArray(l,&lv);
  return(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  return(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscMalloc(red->N*sizeof(PetscInt),&globals);
  for (i=0; i<red->N; i++) globals[i] = i;
  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);
  PetscObjectReference((PetscObject)dm->ltogmap);
  dm->ltogmapb = dm->ltogmap;
  return(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);
  }
  return(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  PetscMalloc(nloc*sizeof(ISColoringValue),&colors);
  for (i=0; i<nloc; i++) colors[i] = i;
  ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);
  ISColoringSetType(*coloring,ctype);
  return(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscMPIInt  flag;
  DM_Redundant *redc = (DM_Redundant*)dmc->data;

  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmc)->comm;
  MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redc->rank,redc->N,dmf);
  return(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscMPIInt  flag;
  DM_Redundant *redf = (DM_Redundant*)dmf->data;

  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmf)->comm;
  MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redf->rank,redf->N,dmc);
  return(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
  MatCreate(((PetscObject)dmc)->comm,P);
  MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);
  MatSetType(*P,MATAIJ);
  MatSeqAIJSetPreallocation(*P,1,0);
  MatMPIAIJSetPreallocation(*P,1,0,0,0);
  MatGetOwnershipRange(*P,&rstart,&rend);
  for (i=rstart; i<rend; i++) {MatSetValue(*P,i,i,1.0,INSERT_VALUES);}
  MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);
  if (scale) {DMCreateInterpolationScale(dmc,dmf,*P,scale);}
  return(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{

  PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));
  return(0);
}
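/*
   Minimal usage sketch for DMRedundantSetSize() (illustrative, not part of the
   original source; error checking elided for brevity):

       DM dm;
       DMCreate(PETSC_COMM_WORLD,&dm);
       DMSetType(dm,DMREDUNDANT);
       DMRedundantSetSize(dm,0,10);     -- rank 0 owns 10 redundant dofs
       DMSetUp(dm);
       ... use the DM ...
       DMDestroy(&dm);
*/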

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-   N - total number of redundant degrees of freedom (or PETSC_NULL)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{

  PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));
  return(0);
}
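/*
   Illustrative query sketch for DMRedundantGetSize() (not part of the original
   source):

       PetscInt owner,N;
       DMRedundantGetSize(dm,&owner,&N);        -- both the owning rank and the size
       DMRedundantGetSize(dm,PETSC_NULL,&N);    -- only the total size
*/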

EXTERN_C_BEGIN
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscMPIInt    myrank;

  MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);
  red->rank = rank;
  red->N = N;
  red->n = (myrank == rank) ? N : 0;
  return(0);
}

PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  return(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant   *red;

  PetscNewLog(dm,DM_Redundant,&red);
  dm->data = red;

  PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);
  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;
  PetscStrallocpy(VECSTANDARD,&dm->vectype);
  PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);
  PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);
  return(0);
}
EXTERN_C_END

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{

  DMCreate(comm,dm);
  DMSetType(*dm,DMREDUNDANT);
  DMRedundantSetSize(*dm,rank,N);
  DMSetUp(*dm);
  return(0);
}
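/*
   Illustrative usage sketch for DMRedundantCreate() (not part of the original
   source; error checking elided for brevity): create a redundant DM with 5 values
   owned by rank 0, then move data between the global vector (stored only on the
   owner) and the local vectors (full-length copies on every process):

       DM  dm;
       Vec g,l;
       DMRedundantCreate(PETSC_COMM_WORLD,0,5,&dm);
       DMCreateGlobalVector(dm,&g);                  -- length 5 on rank 0, 0 elsewhere
       DMCreateLocalVector(dm,&l);                   -- length 5 on every process
       VecSet(g,1.0);
       DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);   -- broadcast from the owner
       DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
       VecDestroy(&l);
       VecDestroy(&g);
       DMDestroy(&dm);
*/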