Actual source code: sliced.c

petsc-3.5.4 2015-05-23
#include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
#include <petscmat.h>
#include <petsc-private/dmimpl.h>

/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;
} DMSlicedBlockFills;

typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;
  DMSlicedBlockFills *dfill,*ofill;
} DM_Sliced;

PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  bs   = slice->bs;
  MatCreate(PetscObjectComm((PetscObject)dm),J);
  MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetBlockSize(*J,bs);
  MatSetType(*J,dm->mattype);
  MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);
  MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);
  if (!aij) {
    PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);
  }
  if (aij) {
    if (bs == 1) {
      MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);
      MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
    } else if (!slice->d_nnz) {
      MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);
      MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
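      /* Worked example (illustrative numbers): with bs=2, no dfill/ofill, d_nnz[i/bs]=3 and o_nnz[i/bs]=2,
       * the loop below gives sd_nnz[i] = (3-1)*2 + 2 = 6 and so_nnz[i] = 2*2 = 4 for each scalar row i.
       * When fills are set, the dense block width bs is replaced by the per-row counts from dfill->i/ofill->i. */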
      PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);
      for (i=0; i<slice->n*bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);
      MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);
      PetscFree2(sd_nnz,so_nnz);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscMalloc1(slice->n+slice->Nghosts,&globals);
  MatGetOwnershipRange(*J,&rstart,NULL);
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);
  MatSetLocalToGlobalMapping(*J,lmap,lmap);
  ISLocalToGlobalMappingDestroy(&lmap);
  return(0);
}
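/* Usage sketch: because the matrix created above carries a block local-to-global mapping, assembly can be
   done with local block indices.  A hypothetical helper, where row and col are local block indices and v
   points to a dense bs-by-bs block supplied by the caller: */
static PetscErrorCode DMSlicedExample_AssembleBlock(Mat J,PetscInt row,PetscInt col,const PetscScalar *v)
{
  PetscErrorCode ierr;

  ierr = MatSetValuesBlockedLocal(J,1,&row,1,&col,v,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  return(0);
}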

/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector()

@*/
PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFree(slice->ghosts);
  PetscMalloc1(Nghosts,&slice->ghosts);
  PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  return(0);
}
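/* Usage sketch: a hypothetical helper showing how DMSlicedSetGhosts() is meant to be called; the block
   size, block counts, and ghost indices below are made-up illustration values. */
static PetscErrorCode DMSlicedExample_SetGhosts(MPI_Comm comm,DM *dm)
{
  PetscErrorCode ierr;
  const PetscInt ghosts[] = {12,17};  /* global indices of blocks owned by other processes */

  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
  ierr = DMSlicedSetGhosts(*dm,2,8,2,ghosts);CHKERRQ(ierr);  /* bs=2, 8 owned blocks, 2 ghost blocks */
  return(0);
}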

/*@C
    DMSlicedSetPreallocation - Sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
           submatrix (same for all local block rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the diagonal portion of the local submatrix (possibly different for each block
           row), or NULL
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
           submatrix (same for all local block rows)
-    o_nnz - array containing the number of block nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row), or NULL

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMCreateMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMCreateMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  return(0);
}
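/* Usage sketch: a hypothetical helper with made-up per-block-row counts for a DM with three owned blocks.
   Only the pointers are stored above, so the arrays must remain valid until the matrix is actually
   created; static storage is used here for that reason. */
static PetscErrorCode DMSlicedExample_Preallocate(DM dm)
{
  PetscErrorCode        ierr;
  static const PetscInt d_nnz[] = {3,4,3};  /* block nonzeros coupling to block rows owned by this process */
  static const PetscInt o_nnz[] = {1,0,2};  /* block nonzeros coupling to block rows owned by other processes */

  ierr = DMSlicedSetPreallocation(dm,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  return(0);
}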

static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  if (*inf) {PetscFree3((*inf)->i,(*inf)->j,*inf);}
  if (!fill) return(0);
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz;
  *inf  = f;
  return(0);
}
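/* Illustrative example of the conversion above: for bs=2 and the row-major fill pattern {1,0, 1,1},
   the loops produce i = {0,1,3} and j = {0, 0,1}; that is, row 0 couples only to component 0 while
   row 1 couples to components 0 and 1. */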

/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMCreateMatrix().

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
-   ofill - the fill pattern in the off-diagonal blocks

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.

    Level: advanced

.seealso DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);
  DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);
  return(0);
}
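/* Usage sketch: a hypothetical helper with an illustrative fill pattern for a 2-component problem
   (assumes DMSlicedSetGhosts() has already set bs=2).  The row-major 0/1 masks below allow full coupling
   inside the diagonal block but couple off-diagonal blocks only through component 0. */
static PetscErrorCode DMSlicedExample_BlockFills(DM dm)
{
  PetscErrorCode ierr;
  const PetscInt dfill[4] = {1,1,
                             1,1};
  const PetscInt ofill[4] = {1,0,
                             1,0};

  ierr = DMSlicedSetBlockFills(dm,dfill,ofill);CHKERRQ(ierr);
  return(0);
}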

static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFree(slice->ghosts);
  if (slice->dfill) {PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);}
  if (slice->ofill) {PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscFree(slice);
  return(0);
}

static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  *gvec = 0;
  VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);
  VecSetDM(*gvec,dm);
  return(0);
}

static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  VecGhostIsLocalForm(g,l,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);
  VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);
  return(0);
}

static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  VecGhostIsLocalForm(g,l,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);
  return(0);
}
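/* Usage sketch: with DMSLICED the "local" vector passed to DMGlobalToLocalBegin/End() must be the ghosted
   local form of the global vector (enforced by the VecGhostIsLocalForm() checks above), not a vector from
   DMCreateLocalVector().  A hypothetical helper: */
static PetscErrorCode DMSlicedExample_UpdateGhosts(DM dm,Vec g)
{
  PetscErrorCode ierr;
  Vec            l;

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return(0);
}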

/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields.

   See DMSlicedCreate() for details.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
{
  DM_Sliced      *slice;

  PetscNewLog(p,&slice);
  p->data = slice;

  PetscObjectChangeTypeName((PetscObject)p,DMSLICED);

  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->creatematrix       = DMCreateMatrix_Sliced;
  p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
  p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  return(0);
}

/*@C
    DMSlicedCreate - Creates a DM object used to manage data for an unstructured problem

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   bs - the block size
.   nlocal - number of local (owned, non-ghost) blocks on this process
.   Nghosts - number of ghost blocks needed on this process
.   ghosts - global indices of each ghost block for this process
.   d_nnz - matrix preallocation information representing coupling within this process
-   o_nnz - matrix preallocation information representing coupling between this process and other processes

    Output Parameter:
.   dm - the sliced DM object

    Notes:
        This DM does not support DMCreateLocalVector(); instead one directly uses
        VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation, and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
        the ghost points.

        DMGlobalToLocalBegin() and DMGlobalToLocalEnd() may be used in place of VecGhostUpdateBegin() and VecGhostUpdateEnd(), provided the local vector passed to them is the ghosted local form of the global vector.

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
         VecGhostGetLocalForm(), VecGhostRestoreLocalForm()

@*/
PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
{

  DMCreate(comm,dm);
  DMSetType(*dm,DMSLICED);
  DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);
  if (d_nnz) {
    DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);
  }
  return(0);
}
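/* End-to-end usage sketch: a hypothetical helper with made-up sizes and indices showing the intended
   workflow: create the sliced DM, obtain a ghosted global vector and a preallocated matrix from it, and
   update ghost values with the VecGhost interface. */
static PetscErrorCode DMSlicedExample_Workflow(MPI_Comm comm)
{
  PetscErrorCode ierr;
  const PetscInt ghosts[] = {12,17};    /* global indices of blocks owned by other processes */
  const PetscInt d_nnz[]  = {3,4,3,3};  /* per-block-row preallocation, diagonal portion */
  const PetscInt o_nnz[]  = {1,0,2,1};  /* per-block-row preallocation, off-diagonal portion */
  DM             dm;
  Vec            x;
  Mat            A;

  ierr = DMSlicedCreate(comm,2,4,2,ghosts,d_nnz,o_nnz,&dm);CHKERRQ(ierr);  /* bs=2, 4 owned blocks, 2 ghost blocks */
  ierr = DMCreateGlobalVector(dm,&x);CHKERRQ(ierr);
  ierr = DMCreateMatrix(dm,&A);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  return(0);
}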